diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..58a9120 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,20 @@ +version: 2 + +updates: + - package-ecosystem: "uv" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 2 + commit-message: + prefix: "build" + include: "scope" + rebase-strategy: "auto" + - package-ecosystem: github-actions + directory: / + commit-message: + prefix: "build" + include: "scope" + rebase-strategy: "auto" + schedule: + interval: "weekly" diff --git a/.github/labeler.yaml b/.github/labeler.yaml new file mode 100644 index 0000000..a049466 --- /dev/null +++ b/.github/labeler.yaml @@ -0,0 +1,19 @@ +dependencies: + - changed-files: + - any-glob-to-any-file: "uv.lock" + +github_actions: + - changed-files: + - any-glob-to-any-file: ".github/**" + +docs: + - changed-files: + - any-glob-to-any-file: "**/*.md" + +release: + - changed-files: + - any-glob-to-any-file: "CHANGELOG.md" + +tests: + - changed-files: + - any-glob-to-any-file: "tests/**" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0aa1459..f4b5709 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,65 +2,104 @@ name: CI on: push: - branches: - - main pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }} + cancel-in-progress: true env: - DEFAULT_PYTHON: "3.13" - DEFAULT_OS: ubuntu-latest + UV_VERSION: "0.9.4" jobs: + pre-commit: + runs-on: ubuntu-latest + name: Pre-commit hooks (lint/format/spell/type, all files) + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + version: ${{ env.UV_VERSION }} + + - name: Install dependencies + run: uv sync --all-groups + + - name: Run pre-commit + run: uv run pre-commit run --show-diff-on-failure --color=always --all-files --hook-stage push + + pytest: + name: Tests ${{ matrix.os }} / py${{ matrix.python }} + needs: pre-commit runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: - python-version: ["3.11", "3.12", "3.13"] os: [ubuntu-latest, windows-latest] - - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install ".[chronify,dev]" - - name: Run pytest with coverage - run: | - pytest -v --cov --cov-report=xml - - name: codecov - uses: codecov/codecov-action@v4.2.0 - if: ${{ matrix.os == env.DEFAULT_OS && matrix.python-version == env.DEFAULT_PYTHON }} - with: - token: ${{ secrets.CODECOV_TOKEN }} - name: infrasys-tests - fail_ci_if_error: false - verbose: true - mypy: - runs-on: ubuntu-latest - name: "mypy" + python: ["3.11", "3.12", "3.13"] + defaults: + run: + shell: bash steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install uv - uses: astral-sh/setup-uv@v3 - - name: Set up Python - uses: actions/setup-python@v5 + uses: astral-sh/setup-uv@v7 with: - python-version-file: "pyproject.toml" - - name: Installing dependencies - run: uv sync --dev - - name: Run mypy + enable-cache: true + version: ${{ env.UV_VERSION }} + + - name: Set up Python ${{ matrix.python }} + run: uv python install ${{ matrix.python }} + + - name: Install dependencies + run: uv sync --all-groups + + - 
name: Running package tests run: | - uv run mypy --config-file=pyproject.toml --ignore-missing-imports src/ - ruff: + uv run pytest --cov --cov-report=xml + + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + package: + name: Package smoke test + needs: pytest runs-on: ubuntu-latest - name: "ruff" steps: - - uses: actions/checkout@v4 - - uses: chartboost/ruff-action@v1 + - name: Checkout + uses: actions/checkout@v6 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - src: "./src" + version: ${{ env.UV_VERSION }} + enable-cache: true + + - name: Install dependencies + run: uv sync --all-groups + + - name: Build and install wheel + run: | + uv build + python -m venv pkgtest + source pkgtest/bin/activate + python -m pip install --upgrade pip + python -m pip install dist/*.whl + python -c "import infrasys as m; print(getattr(m, '__version__', 'OK'))" diff --git a/.github/workflows/commit.yaml b/.github/workflows/commit.yaml new file mode 100644 index 0000000..38b2d16 --- /dev/null +++ b/.github/workflows/commit.yaml @@ -0,0 +1,46 @@ +name: on-commit + +on: + pull_request: + types: [opened, reopened, synchronize] + +jobs: + labeler: + name: apply labels + permissions: + contents: read + pull-requests: write + issues: write + runs-on: [ubuntu-latest] + steps: + - uses: actions/checkout@v6 + - uses: actions/labeler@v6.0.1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + configuration-path: .github/labeler.yaml + sync-labels: true + + lint-commit-messages: + name: lint commit message + runs-on: [ubuntu-latest] + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ github.event.pull_request.head.sha }} + fetch-depth: 0 + - name: Install uv + uses: astral-sh/setup-uv@v7 + - name: Commitizen check + run: | + uvx --from commitizen cz check --rev-range HEAD^! 
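+          # Example of a message this check accepts (Conventional Commits): +          #   feat(time-series): add deterministic forecast support +          # A bare "update stuff" would fail the check.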
+ lint-pr-title: + # default: lints titles using https://github.com/commitizen/conventional-commit-types + name: lint pr title + runs-on: [ubuntu-latest] + permissions: + pull-requests: read + steps: + - uses: amannn/action-semantic-pull-request@v6.1.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index d93cc6e..52175ce 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -9,23 +9,22 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: select python version - uses: actions/setup-python@v5 + + - name: Install uv + uses: astral-sh/setup-uv@v6 with: - python-version: "3.11" - - name: install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install ".[chronify,dev]" - - name: build documentation - run: | - cd docs - make clean - make html - - name: deploy + version: "latest" + + - name: Install the project + run: uv sync --group docs + + - name: Build Sphinx documentation + run: uv run sphinx-build docs/source/ docs/_build/ + + - name: Deploy on GitHub Pages uses: peaceiris/actions-gh-pages@v3.6.1 with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./docs/_build/html + publish_dir: ./docs/_build/ force_orphan: true full_commit_message: ${{ github.event.head_commit.message }} diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml deleted file mode 100644 index c20fa5f..0000000 --- a/.github/workflows/publish_to_pypi.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Upload to PyPi -on: - release: - types: [published] - -jobs: - pypi-publish: - name: Upload release to PyPI - runs-on: ubuntu-latest - permissions: - id-token: write - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install build - - name: Build and publish - run: | - python -m build - - name: Publish package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..b122c6c --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,103 @@ +name: release-please +on: + push: + branches: + - main + +concurrency: + group: release-please + cancel-in-progress: true + +permissions: + contents: write + pull-requests: write + id-token: write + +env: + UV_VERSION: "0.9.4" + +jobs: + release-please: + outputs: + release_created: ${{ steps.release.outputs.release_created }} + release_tag: ${{ steps.release.outputs.tag_name }} + runs-on: ubuntu-latest + steps: + - name: Run release-please + id: release + uses: googleapis/release-please-action@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + config-file: .release-please-config.json + manifest-file: .release-please-manifest.json + target-branch: main + + build: + name: Build + needs: release-please + if: needs.release-please.outputs.release_created + runs-on: ubuntu-latest + steps: + - name: Checkout release commit + uses: actions/checkout@v6 + with: + ref: ${{ needs.release-please.outputs.release_tag }} + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + version: ${{ env.UV_VERSION }} + enable-cache: true + + - name: Install dependencies + run: uv sync --all-groups + + - 
name: Build package + run: uv build + + - name: Store the distribution packages + uses: actions/upload-artifact@v5 + with: + name: python-package-distributions + path: dist/ + + publish-testpypi: + runs-on: ubuntu-latest + needs: build + environment: + name: testpypi + url: https://test.pypi.org/p/infrasys + steps: + - name: Download all the dists + uses: actions/download-artifact@v6 + with: + name: python-package-distributions + path: dist/ + + - name: Publish package to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + + publish-pypi: + needs: + - build + - publish-testpypi + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/infrasys + steps: + - name: Download all the dists + uses: actions/download-artifact@v6 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution πŸ“¦ to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 025a30a..39675e1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,15 +1,68 @@ +default_stages: + - pre-commit + +default_install_hook_types: + - pre-commit + - commit-msg + - pre-push + repos: -- repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.2.1 - hooks: - # Run the linter. - - id: ruff - args: [ --fix ] - # Run the formatter. - - id: ruff-format -- repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.13.0 - hooks: - - id: mypy - language: system + - repo: local + hooks: + - id: ruff-format + name: ruff format + entry: uv run ruff format --verbose + language: system + types_or: [python, pyi] + + - id: ruff-check + name: ruff check + entry: uv run ruff check --fix --config=pyproject.toml + language: system + types_or: [python, pyi] + + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + - id: check-merge-conflict + - id: check-yaml + - id: check-toml + - id: check-json + - id: check-case-conflict + + - repo: https://github.com/commitizen-tools/commitizen + rev: v4.10.0 + hooks: + - id: commitizen + stages: + - commit-msg + + - repo: local + hooks: + - id: pytest + name: pytest (quick) + entry: uv run pytest -q -m "not slow" --maxfail=1 --disable-warnings + language: system + types: [python] + pass_filenames: false + stages: [pre-push] + + + - repo: https://github.com/astral-sh/uv-pre-commit + # uv version. 
+ rev: 0.9.4 + hooks: + - id: uv-lock + + - repo: local + hooks: + - id: mypy + name: mypy + entry: uv run mypy --config-file=pyproject.toml src/infrasys + language: system + types: [python] + pass_filenames: false + stages: [manual] diff --git a/.release-please-config.json b/.release-please-config.json new file mode 100644 index 0000000..80e8aad --- /dev/null +++ b/.release-please-config.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "bump-minor-pre-major": true, + "draft": false, + "extra-files": [ + { + "jsonpath": "$.package[?(@.name.value=='infrasys')].version", + "path": "uv.lock", + "type": "toml" + } + ], + "include-component-in-tag": false, + "package-name": "infrasys", + "prerelease": false, + "release-type": "python" + } + } +} diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..0074ec3 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.0.0rc3" +} diff --git a/README.md b/README.md index 07f5055..6bb3d0b 100644 --- a/README.md +++ b/README.md @@ -1,48 +1,105 @@ # infrasys -[![CI](https://github.com/NREL/infrasys/workflows/CI/badge.svg)](https://github.com/NREL/infrasys/actions/workflows/ci.yml) -[![codecov](https://codecov.io/gh/NREL/infrasys/branch/main/graph/badge.svg)](https://codecov.io/gh/NREL/infrasys) - -This package implements a data store for components and time series in support of Python-based -modeling packages. While it is designed to support teams modeling transmission and distribution -systems for electrical grids, it can be used by any package that needs to store components -(e.g., generators and buses) that have quantities (e.g., power and voltage) which may vary over -time. - -The package was inspired by -[InfrastructureSystems.jl](https://github.com/NREL-Sienna/InfrastructureSystems.jl) - -## Benefits -- Stores components in data structures that provide fast lookup and iteration by type and name. -- Provides extendable data models that enable validation and unit conversion through -[pint](https://pint.readthedocs.io/en/stable/). -- Manages time series data efficiently. Data is only loaded into system memory when needed by -the user application. -- Manages serialization and de-serialization of components to JSON, including automatic handling of -nested objects. -- Enables data model migration. 
- -## Package Developer Guide -🚧 - -## Installation -``` -$ pip install git+ssh://git@github.com/NREL/infrastructure_systems.git@main +[![CI](https://github.com/NREL/infrasys/actions/workflows/ci.yml/badge.svg)](https://github.com/NREL/infrasys/actions/workflows/ci.yml) +[![Codecov](https://codecov.io/gh/NREL/infrasys/branch/main/graph/badge.svg)](https://codecov.io/gh/NREL/infrasys) +[![PyPI](https://img.shields.io/pypi/v/infrasys.svg)](https://pypi.org/project/infrasys/) +[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) +[![Python](https://img.shields.io/badge/python-3.11%2B-3776ab?logo=python)](https://www.python.org/) +[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-%23FE5196?logo=conventionalcommits&logoColor=white)](https://conventionalcommits.org) +[![License](https://img.shields.io/badge/license-BSD%203--Clause-blue)](LICENSE.txt) +[![Docs](https://img.shields.io/badge/docs-GitHub-blue)](https://github.com/NREL/infrasys/tree/main/docs) +[![release-please](https://github.com/NREL/infrasys/actions/workflows/release.yaml/badge.svg)](https://github.com/NREL/infrasys/actions/workflows/release.yaml) +[![Docs (GitHub Pages)](https://github.com/NREL/infrasys/actions/workflows/gh-pages.yml/badge.svg)](https://github.com/NREL/infrasys/actions/workflows/gh-pages.yml) + +infrasys is a lightweight data store that keeps track of components, their attributes, and +time series for energy infrastructure models. The core package is opinionated about validation, +unit handling, and data migration so that downstream modeling packages can focus on solving +their domain problems instead of managing persistence concerns. + +## Highlights + +- **Typed components with pint validation:** Base models derive from `pydantic` and use + `pint` quantities whenever a physical unit is involved. +- **Flexible time-series storage:** Arrow, HDF5, Chronify, and in-memory backends are available + via `System` configuration to match your compute environment. +- **Efficient serialization:** Components, supplemental attributes, and nested systems are + serialized to JSON with automatic metadata and optional migration hooks. +- **Designed for extension:** Derive your own `System` classes, override component addition + logic, or ship supplemental attributes alongside the core storage. + +## Getting started + +### Install + +```bash +pip install git+https://github.com/NREL/infrasys.git@main +pip install "infrasys[chronify]" # optional backend for Chronify/duckdb-based storage ``` -## Developer installation +Don’t forget to install pre-commit hooks so your push meets project quality checks: + +```bash +pre-commit install ``` -$ pip install -e ".[dev]" + +### Quick example + +```python +from infrasys import Component, System +from infrasys.location import Location + + +class Bus(Component): +    voltage: float +    location: Location | None = None + + +system = System(name="demo-grid") +bus = Bus(name="bus-1", voltage=1.05, location=Location(x=0.0, y=0.0)) +system.add_components(bus) +system.to_json("demo-grid/system.json") ``` -Please install `pre-commit` so that your code is checked before making commits. +Instantiate a `System`, add a few components, and dump everything to JSON. Time series data +gets written to a sibling directory alongside the JSON file so you can round-trip it with +`System.to_json(...)` and `System.from_json(...)`. + +## Documentation + +- **How To guides:** step-by-step recipes in `docs/how_tos`.
+- **Tutorials:** opinionated walkthroughs for custom systems under `docs/tutorials`. +- **API Reference:** auto-generated reference material lives in `docs/reference`. +- **Explanation articles:** deeper dives on the storage backends, migrations, and behavior in + `docs/explanation`. + +To build the docs locally, install the `docs` dependency group and run `uv run sphinx-build docs/source/ docs/_build/`, the same command the docs workflow uses. + +## Development + +- Clone this repository and install the dev dependency group before hacking: + +```bash +uv sync --all-groups ``` + +- Run the test suite and coverage reporting via: + +```bash +uv run pytest ``` +- Formatting and linting are managed by `ruff` and configured through its `pyproject.toml` section. + Keep your hooks healthy by installing them via `pre-commit install` (see Getting started) and running + `pre-commit run --all-files` before pushing. + +## Support & Contribution + +infrasys is being developed under NREL Software Record SWR-24-42. Report issues and feature +requests at [https://github.com/NREL/infrasys/issues](https://github.com/NREL/infrasys/issues). +Review the `docs/reference` and `docs/how_tos` material before submitting a change so your +diff is aligned with the project conventions. + ## License -infrasys is released under a BSD 3-Clause -[License](https://github.com/NREL/infrasys/blob/main/LICENSE.txt). -infrasys was developed under software record SWR-24-42 at the National Renewable Energy Laboratory -([NREL](https://www.nrel.gov)). +infrasys is released under the BSD 3-Clause License. See +[LICENSE.txt](LICENSE.txt) for details. diff --git a/docs/explanation/time_series.md b/docs/explanation/time_series.md index ff6d114..5e052d4 100644 --- a/docs/explanation/time_series.md +++ b/docs/explanation/time_series.md @@ -1,17 +1,18 @@ # Time Series + Infrastructure systems supports time series data expressed as a one-dimensional array of floats -using the class [SingleTimeSeries](#singe-time-series-api). Users must provide a `variable_name` +using the class {py:class}`infrasys.time_series_models.SingleTimeSeries`. Users must provide a `name` that is typically the field of a component being modeled. For example, if the user has a time array associated with the active power of a generator, they would assign -`variable_name = "active_power"`. +`name = "active_power"`. -Here is an example of how to create an instance of `SingleTimeSeries`: +Here is an example of how to create an instance of {py:class}`infrasys.time_series_models.SingleTimeSeries`: ```python import random time_series = SingleTimeSeries.from_array( data=[random.random() for x in range(24)], - variable_name="active_power", + name="active_power", initial_time=datetime(year=2030, month=1, day=1), resolution=timedelta(hours=1), ) @@ -23,7 +24,7 @@ there might be different profiles for different scenarios or model years. ```python time_series = SingleTimeSeries.from_array( data=[random.random() for x in range(24)], - variable_name="active_power", + name="active_power", initial_time=datetime(year=2030, month=1, day=1), resolution=timedelta(hours=1), scenario="high", @@ -31,12 +32,92 @@ there might be different profiles for different scenarios or model years. ``` +## Deterministic Time Series + +In addition to `SingleTimeSeries`, infrasys also supports deterministic time series, +which are used to represent forecasts or scenarios with a known future.
+ +The {py:class}`infrasys.time_series_models.Deterministic` class represents a time series where +the data is explicitly stored as a 2D array, with each row representing a forecast window and +each column representing a time step within that window. + +You can create a Deterministic time series in two ways: + +1. **Explicitly with forecast data** using `Deterministic.from_array()` when you have pre-computed forecast values. +2. **From a SingleTimeSeries** using `Deterministic.from_single_time_series()` to create a "perfect forecast" based on historical data by extracting overlapping windows. + +### Creating Deterministic Time Series with Explicit Data + +This approach is used when you have explicit forecast data available. Each forecast window is stored as a row in a 2D array. + +Example: + +```python +import numpy as np +from datetime import datetime, timedelta +from infrasys.time_series_models import Deterministic +from infrasys.quantities import ActivePower + +initial_time = datetime(year=2020, month=9, day=1) +resolution = timedelta(hours=1) +horizon = timedelta(hours=8) # 8 hours horizon (8 values per forecast) +interval = timedelta(hours=1) # 1 hour between forecasts +window_count = 3 # 3 forecast windows + +# Create forecast data as a 2D array where: +# - Each row is a forecast window +# - Each column is a time step in the forecast horizon +forecast_data = [ +    [100.0, 101.0, 101.3, 90.0, 98.0, 87.0, 88.0, 67.0], # 2020-09-01T00 forecast +    [101.0, 101.3, 99.0, 98.0, 88.9, 88.3, 67.1, 89.4], # 2020-09-01T01 forecast +    [99.0, 67.0, 89.0, 99.9, 100.0, 101.0, 112.0, 101.3], # 2020-09-01T02 forecast +] + +# Create the data with units +data = ActivePower(np.array(forecast_data), "watts") +name = "active_power_forecast" +ts = Deterministic.from_array( +    data, name, initial_time, resolution, horizon, interval, window_count +) +``` + +### Creating "Perfect Forecasts" from SingleTimeSeries + +The `from_single_time_series()` classmethod is useful when you want to create a "perfect forecast" based on historical data for testing or validation purposes. It extracts overlapping forecast windows from an existing `SingleTimeSeries`. + +Example: + +```python +from datetime import datetime, timedelta +from infrasys.time_series_models import Deterministic, SingleTimeSeries + +initial_timestamp = datetime(year=2020, month=1, day=1) +name = "active_power" +ts = SingleTimeSeries.from_array( +    data=range(8784), +    name=name, +    resolution=timedelta(hours=1), +    initial_timestamp=initial_timestamp, +) +horizon = timedelta(hours=8) +interval = timedelta(hours=1) +ts_deterministic = Deterministic.from_single_time_series( +    ts, interval=interval, horizon=horizon +) +``` + +In this example, `ts_deterministic` creates a forecast for `active_power` by extracting forecast windows from the original `SingleTimeSeries` `ts` at different offsets determined by `interval` and `horizon`. The forecast data is materialized as a 2D array where each row is a forecast window. + ## Resolution Infrastructure systems support two types of objects for passing the resolution: :class:`datetime.timedelta` and :class:`dateutil.relativedelta.relativedelta`. These types allow users to define durations with varying levels of granularity -and semantic meaning.
While `timedelta` is best suited for precise, fixed-length intervals (e.g., seconds, minutes, hours, days), `relativedelta` is more appropriate for calendar-aware durations such as months or years, which do not @@ -52,17 +133,18 @@ For example, a `relativedelta` of 1 month will be converted to the ISO format string `P1M` and a `timedelta` of 1 hour will be converted to `P0DT1H`.
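+ +For example, a monthly profile can reuse the `from_array` pattern shown at the top of this page with a calendar-aware resolution (a sketch, assuming the same imports as the earlier examples): + +```python +from dateutil.relativedelta import relativedelta + +# Monthly steps cannot be expressed exactly with timedelta; relativedelta is calendar-aware. +monthly = SingleTimeSeries.from_array( +    data=[random.random() for x in range(12)], +    name="active_power", +    initial_time=datetime(year=2030, month=1, day=1), +    resolution=relativedelta(months=1), +) +```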
## Behaviors + Users can customize time series behavior with these flags passed to the `System` constructor: - `time_series_in_memory`: The `System` stores each array of data in an Arrow file by default. This -is a binary file that enables efficient storage and row access. Set this flag to store the data in -memory instead. + is a binary file that enables efficient storage and row access. Set this flag to store the data in + memory instead. - `time_series_read_only`: The default behavior allows users to add and remove time series data. -Set this flag to disable mutation. That can be useful if you are de-serializing a system, won't be -changing it, and want to avoid copying the data. + Set this flag to disable mutation. That can be useful if you are de-serializing a system, won't be + changing it, and want to avoid copying the data. - `time_series_directory`: The `System` stores time series data on the computer's tmp filesystem by -default. This filesystem may be of limited size. If your data will exceed that limit, such as what -is likely to happen on an HPC compute node, set this parameter to an alternate location (such as -`/tmp/scratch` on NREL's HPC systems). + default. This filesystem may be of limited size. If your data will exceed that limit, such as what + is likely to happen on an HPC compute node, set this parameter to an alternate location (such as + `/tmp/scratch` on NREL's HPC systems). Refer to the [Time Series API](#time-series-api) for more information. diff --git a/docs/how_tos/index.md b/docs/how_tos/index.md index f54e45d..ad04f3a 100644 --- a/docs/how_tos/index.md +++ b/docs/how_tos/index.md @@ -1,6 +1,7 @@ ```{eval-rst} .. _how-tos-page: ``` + # How Tos ```{eval-rst} diff --git a/docs/how_tos/list_time_series.md b/docs/how_tos/list_time_series.md index 08059f1..26f3c07 100644 --- a/docs/how_tos/list_time_series.md +++ b/docs/how_tos/list_time_series.md @@ -24,9 +24,9 @@ system.add_components(bus, gen) length = 10 initial_time = datetime(year=2020, month=1, day=1) timestamps = [initial_time + timedelta(hours=i) for i in range(length)] -variable_name = "active_power" -ts1 = SingleTimeSeries.from_time_array(np.random.rand(length), variable_name, timestamps) -ts2 = SingleTimeSeries.from_time_array(np.random.rand(length), variable_name, timestamps) +name = "active_power" +ts1 = SingleTimeSeries.from_time_array(np.random.rand(length), name, timestamps) +ts2 = SingleTimeSeries.from_time_array(np.random.rand(length), name, timestamps) key1 = system.add_time_series(ts1, gen, scenario="low") key2 = system.add_time_series(ts2, gen, scenario="high") @@ -38,17 +38,19 @@ ts2_b = system.get_time_series_by_key(gen, key2) for key in system.list_time_series_keys(gen): print(f"{gen.label}: {key}") ``` + ``` -SimpleGenerator.gen: variable_name='active_power' initial_time=datetime.datetime(2020, 1, 1, 0, 0) resolution=datetime.timedelta(seconds=3600) time_series_type=<class 'infrasys.time_series_models.SingleTimeSeries'> user_attributes={'scenario': 'high'} length=10 -SimpleGenerator.gen: variable_name='active_power' initial_time=datetime.datetime(2020, 1, 1, 0, 0) resolution=datetime.timedelta(seconds=3600) time_series_type=<class 'infrasys.time_series_models.SingleTimeSeries'> user_attributes={'scenario': 'low'} length=10 +SimpleGenerator.gen: name='active_power' initial_time=datetime.datetime(2020, 1, 1, 0, 0) resolution=datetime.timedelta(seconds=3600) time_series_type=<class 'infrasys.time_series_models.SingleTimeSeries'> user_attributes={'scenario': 'high'} length=10 +SimpleGenerator.gen: name='active_power' initial_time=datetime.datetime(2020, 1, 1, 0, 0) resolution=datetime.timedelta(seconds=3600) time_series_type=<class 'infrasys.time_series_models.SingleTimeSeries'> user_attributes={'scenario': 'low'} length=10 ``` You can also retrieve time series by specifying the parameters as shown here: ```python -system.time_series.get(gen, variable_name="active_power", scenario="high") +system.time_series.get(gen, name="active_power", scenario="high") ``` + ``` -SingleTimeSeries(variable_name='active_power', normalization=None, data=array([0.29276233, 0.97400382, 0.76499075, 0.95080431, 0.61749027, +SingleTimeSeries(name='active_power', normalization=None, data=array([0.29276233, 0.97400382, 0.76499075, 0.95080431, 0.61749027, 0.73899945, 0.57877704, 0.3411286 , 0.80701393, 0.53051773]), resolution=datetime.timedelta(seconds=3600), initial_time=datetime.datetime(2020, 1, 1, 0, 0), length=10) ``` diff --git a/docs/how_tos/storage_backends.md b/docs/how_tos/storage_backends.md new file mode 100644 index 0000000..fd38900 --- /dev/null +++ b/docs/how_tos/storage_backends.md @@ -0,0 +1,374 @@ +# How to Use Different Storage Backends for Time Series Data + +This guide explains how to use the different storage backends available in Infrasys for time series data. The backend you choose affects how time series data is stored and accessed throughout the system lifecycle. + +## Available Storage Backends + +Infrasys offers four different storage backends: + +1. **In-Memory Storage** ({py:class}`~infrasys.in_memory_time_series_storage.InMemoryTimeSeriesStorage`): Stores time series data entirely in memory +2. **Arrow Storage** ({py:class}`~infrasys.arrow_storage.ArrowTimeSeriesStorage`): Stores time series data in Apache Arrow files on disk +3.
**Chronify Storage** ({py:class}`~infrasys.chronify_time_series_storage.ChronifyTimeSeriesStorage`): Stores time series data in a SQL database using the Chronify library +4. **HDF5 Storage** (`HDF5TimeSeriesStorage`): Stores time series data in HDF5 files (available in development version) + +## Choosing a Storage Backend + +You can choose the storage backend when creating a {py:class}`~infrasys.system.System` by setting the `time_series_storage_type` parameter: + +```python +from infrasys import System +from infrasys.time_series_models import TimeSeriesStorageType + +# Create a system with in-memory storage +system_memory = System(time_series_storage_type=TimeSeriesStorageType.MEMORY) + +# Create a system with Arrow storage (default) +system_arrow = System(time_series_storage_type=TimeSeriesStorageType.ARROW) + +# Create a system with Chronify storage +system_chronify = System(time_series_storage_type=TimeSeriesStorageType.CHRONIFY) + +# Create a system with HDF5 storage (development version) +system_hdf5 = System(time_series_storage_type=TimeSeriesStorageType.HDF5) +``` + +```{note} +If you don't specify a storage type, Arrow storage is used by default. +``` + +## Storage Directory Configuration + +For file-based storage backends (Arrow and Chronify), you can specify where the time series data will be stored: + +```python +from pathlib import Path +from infrasys import System + +# Use a specific directory for time series data +custom_dir = Path("/path/to/your/storage/directory") +system = System(time_series_directory=custom_dir) +``` + +```{tip} +If `time_series_directory` is not specified, a temporary directory will be created automatically. This directory will be cleaned up when the Python process exits. +``` + +```{warning} +If your time series data is in the range of GBs, you may need to specify an alternate location because the tmp filesystem may be too small. 
+``` + +## Converting Between Storage Types + +You can convert between storage types at runtime using the `convert_storage` method: + +```python +from infrasys.time_series_models import TimeSeriesStorageType + +# Convert from in-memory to Arrow storage +system.convert_storage(time_series_storage_type=TimeSeriesStorageType.ARROW) + +# Convert from Arrow to Chronify storage +system.convert_storage(time_series_storage_type=TimeSeriesStorageType.CHRONIFY) +``` + +Here's a complete example of converting storage backends: + +```python +from datetime import datetime, timedelta +import numpy as np +from infrasys.time_series_models import TimeSeriesStorageType, SingleTimeSeries +from infrasys import System +from tests.models.simple_system import SimpleSystem, SimpleBus, SimpleGenerator + +# Create a system with in-memory storage +system = SimpleSystem(time_series_storage_type=TimeSeriesStorageType.MEMORY) + +# Add components +bus = SimpleBus(name="test-bus", voltage=1.1) +generator = SimpleGenerator(name="gen", active_power=1.0, rating=1.0, bus=bus, available=True) +system.add_components(bus, generator) + +# Create and add time series data +ts_data = SingleTimeSeries( +    data=np.arange(24), +    name="active_power", +    resolution=timedelta(hours=1), +    initial_timestamp=datetime(2020, 1, 1), +) +system.add_time_series(ts_data, generator, scenario="baseline") + +# Verify storage type +print(f"Current storage type: {type(system._time_series_mgr._storage).__name__}") +# Output: Current storage type: InMemoryTimeSeriesStorage + +# Convert to Arrow storage +system.convert_storage(time_series_storage_type=TimeSeriesStorageType.ARROW) + +# Verify new storage type +print(f"New storage type: {type(system._time_series_mgr._storage).__name__}") +# Output: New storage type: ArrowTimeSeriesStorage + +# Verify time series data is still accessible +ts = system.get_time_series(generator, name="active_power", scenario="baseline") +print(f"Time series data preserved: {ts.data_array}") +# Output: Time series data preserved: [0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23] +``` + +```{tip} +Converting between storage types preserves all time series data. This can be useful when you need to optimize performance by switching storage strategies during different phases of your application. +``` + +## Choosing the Right Backend for Your Use Case + +Each storage backend has different characteristics that make it suitable for different use cases: + +### In-Memory Storage + +**Best for:** + +- Small datasets +- Quick prototyping and testing +- Temporary data that doesn't need to persist + +**Characteristics:** + +- Fastest access time +- Data is lost when the program exits +- Limited by available RAM + +```python +system = System(time_series_storage_type=TimeSeriesStorageType.MEMORY) +``` + +### Arrow Storage + +**Best for:** + +- Datasets of any size +- Persistence across program runs +- Efficient file-based storage and retrieval +- Creates one file per time series array. + +```{warning} +This can be problematic on HPC shared filesystems if the number of arrays is greater than 10,000.
+``` + +**Characteristics:** + +- Fast file-based storage using Apache Arrow format +- Good balance of speed and persistence +- Default storage backend + +```python +system = System(time_series_storage_type=TimeSeriesStorageType.ARROW) +``` + +### Chronify Storage + +**Best for:** + +- Complex time series data with relationships +- When SQL queries are needed +- Integration with database systems + +**Characteristics:** + +- Uses a SQL database via the Chronify library +- Supports transactional operations +- More powerful query capabilities + +```python +system = System(time_series_storage_type=TimeSeriesStorageType.CHRONIFY) +``` + +### HDF5 Storage + +**Best for:** + +- Scientific datasets with three or more dimensions +- Data that benefits from HDF5's compression capabilities +- Systems with tens or hundreds of thousands of time series arrays +- Stores all time series arrays in one file. + +**Characteristics:** + +- Uses HDF5 file format, popular in scientific computing +- Supports hierarchical organization of data +- Good compression capabilities +- Compatible with [PowerSystems.jl](https://github.com/NREL-Sienna/PowerSystems.jl) + +```python +system = System(time_series_storage_type=TimeSeriesStorageType.HDF5) +``` + +```{note} +HDF5 storage is currently available in the development version only. +``` + +## Working with Time Series Data + +Regardless of the backend you choose, the API for adding, retrieving, and using time series data remains the same: + +```python +from datetime import datetime, timedelta +import numpy as np +from infrasys.time_series_models import SingleTimeSeries, TimeSeriesStorageType +from tests.models.simple_system import SimpleSystem, SimpleGenerator, SimpleBus + +# Create a system with your chosen backend +system = SimpleSystem(time_series_storage_type=TimeSeriesStorageType.ARROW) + +# Add components +bus = SimpleBus(name="test-bus", voltage=1.1) +generator = SimpleGenerator(name="gen", active_power=1.0, rating=1.0, bus=bus, available=True) +system.add_components(bus, generator) + +# Create and add time series data +length = 24 +initial_time = datetime(year=2020, month=1, day=1) +resolution = timedelta(hours=1) +data = np.random.rand(length) + +# Create a time series +time_series = SingleTimeSeries( +    data=data, +    name="active_power", +    resolution=resolution, +    initial_timestamp=initial_time +) + +# Add the time series to a component +system.add_time_series(time_series, generator, scenario="baseline") + +# Retrieve the time series later +retrieved_ts = system.get_time_series( +    generator, +    name="active_power", +    scenario="baseline" +) +``` + +## Read-Only Mode + +For any storage backend, you can set it to read-only mode, which is useful when +you're working with existing data that won't or shouldn't be modified. For +example, suppose you want to load a system with GBs of time series data. By +default, infrasys will make a copy of the time series data during +de-serialization. If you set `time_series_read_only=True`, infrasys will skip +that copy operation. + +```python +system = System(time_series_read_only=True) +``` + +```{warning} +In read-only mode, attempts to add or modify time series data will raise exceptions. +```
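+ +The flag also applies when re-opening a serialized system (a sketch, assuming a system previously written with `to_json` and that `from_json` forwards extra keyword arguments to the `System` constructor): + +```python +# Hypothetical path; re-open the system without copying its time series data. +system = System.from_json("my_system_data/system.json", time_series_read_only=True) +```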
+ +## Serializing and Deserializing a System + +When saving a system to disk, all the time series data will be properly serialized regardless of the backend used: + +```python +from pathlib import Path + +# Save the entire system (including time series data) +output_file = Path("my_system_data") / "system.json" +system.to_json(output_file) + +# To load the system back +loaded_system = SimpleSystem.from_json(output_file) +``` + +```{note} +The storage backend information is preserved when saving and loading a system. +``` + +## Performance Considerations + +Each storage backend offers different trade-offs in terms of performance: + +- **Memory Usage**: In-memory storage keeps all data in RAM, which can be a limitation for large datasets +- **Disk Space**: Arrow, Chronify, and HDF5 storage use disk space, with different compression characteristics +- **Access Speed**: In-memory is fastest, followed by Arrow/HDF5, then Chronify (depending on the specific operation) +- **Query Flexibility**: Chronify offers the most complex query capabilities through SQL +- **Serialization/Deserialization Speed**: Arrow typically offers the fastest serialization for time series data + +### Relative Performance Comparison + +The table below gives a general comparison of the different storage backends (scale of 1-5, where 5 is best): + +| Storage Type | Read Speed | Write Speed | Memory Usage | Disk Usage | Query Capabilities | +| ------------ | ---------- | ----------- | ------------ | ---------- | ------------------ | +| In-Memory | 5 | 5 | 1 | N/A | 2 | +| Arrow | 4 | 4 | 4 | 3 | 3 | +| Chronify | 2 | 3 | 4 | 3 | 5 | +| HDF5 | 3 | 3 | 4 | 4 | 3 | + +```{note} +The above table is a generalization. Actual performance will depend on your specific dataset characteristics, hardware, and operations being performed. +``` + +### Benchmarking Your Use Case + +For critical applications, it's recommended to benchmark different storage backends with your specific data patterns: + +```python +import time +from datetime import datetime, timedelta +import numpy as np +from infrasys.time_series_models import TimeSeriesStorageType, SingleTimeSeries +from tests.models.simple_system import SimpleBus, SimpleSystem + +# Function to benchmark storage operations +def benchmark_storage(storage_type, data_size=10000): +    # Setup: time series must be attached to a component, so add a simple bus +    system = SimpleSystem(time_series_storage_type=storage_type) +    bus = SimpleBus(name="bench-bus", voltage=1.1) +    system.add_components(bus) + +    # Generate test data +    data = np.random.random(data_size) +    ts = SingleTimeSeries( +        data=data, +        name="test_variable", +        resolution=timedelta(hours=1), +        initial_timestamp=datetime(2020, 1, 1), +    ) + +    # Benchmark write +    start_time = time.time() +    system.add_time_series(ts, bus) +    write_time = time.time() - start_time + +    # Benchmark read +    start_time = time.time() +    retrieved_ts = system.get_time_series(bus, name="test_variable") +    read_time = time.time() - start_time + +    return {"write_time": write_time, "read_time": read_time} + +# Run benchmarks +results = {} +for storage_type in [ +    TimeSeriesStorageType.MEMORY, +    TimeSeriesStorageType.ARROW, +    TimeSeriesStorageType.CHRONIFY +]: +    results[storage_type.name] = benchmark_storage(storage_type) + +# Print results +for name, times in results.items(): +    print(f"{name} - Write: {times['write_time']:.6f}s, Read: {times['read_time']:.6f}s") +``` + +Choose the storage backend that best meets your specific requirements for memory usage, persistence, access patterns, and query complexity. + +## Summary + +The Infrasys library provides multiple storage backends for time series data, each optimized for different use cases: + +1.
**In-Memory Storage**: Fastest but limited by RAM and lacks persistence +2. **Arrow Storage**: Good balance of speed and persistence, using Apache Arrow files +3. **Chronify Storage**: SQL-based storage with powerful query capabilities and time mappings +4. **HDF5 Storage**: Hierarchical storage format compatible with [PowerSystems.jl](https://github.com/NREL-Sienna/PowerSystems.jl) + +All storage backends implement the same interface, making it easy to switch between them as your needs change. The choice of storage backend doesn't affect how you interact with the time series data through the Infrasys API, but it can significantly impact performance and resource utilization. diff --git a/docs/reference/api/quantities.md b/docs/reference/api/quantities.md index 7423be3..432e6ca 100644 --- a/docs/reference/api/quantities.md +++ b/docs/reference/api/quantities.md @@ -21,4 +21,3 @@ .. automodule:: infrasys.quantities :members: ``` - diff --git a/docs/reference/api/time_series.md b/docs/reference/api/time_series.md index 0e3c7f5..7e6ae44 100644 --- a/docs/reference/api/time_series.md +++ b/docs/reference/api/time_series.md @@ -16,3 +16,17 @@ .. autopydantic_model:: infrasys.time_series_models.SingleTimeSeries :members: ``` + +```{eval-rst} +.. _deterministic-time-series-api: +``` + +```{eval-rst} +.. autopydantic_model:: infrasys.time_series_models.Deterministic +    :members: +``` + +```{eval-rst} +.. autopydantic_model:: infrasys.time_series_models.DeterministicSingleTimeSeries +    :members: +``` diff --git a/docs/reference/benchmarks.md b/docs/reference/benchmarks.md new file mode 100644 index 0000000..ef273f7 --- /dev/null +++ b/docs/reference/benchmarks.md @@ -0,0 +1,46 @@ +# Running benchmarks + +We keep a few scripts that document the measurements behind some of our software decisions; they live in the `scripts/` directory. + +Run one with: + +```console +uv run pytest scripts/$SCRIPT +``` + +# How to Benchmark Different Python Implementations with `pytest-benchmark` + +This guide will walk you through setting up and running performance benchmarks +using `pytest-benchmark`. Benchmarking is crucial for making informed decisions +about which libraries or implementation strategies offer the best performance +for your specific use cases. We'll use the common example of comparing two JSON +serialization libraries: the standard `json` and the faster `orjson`. + +## Why Benchmark? + +When you have multiple ways to achieve the same task (e.g., using different +libraries or algorithms), benchmarks provide quantitative data on their +performance. This data helps you: + +- Identify performance bottlenecks. +- Choose the most efficient library/method for critical code paths. +- Track performance regressions or improvements over time. +- Justify technical decisions with concrete evidence. + +## Prerequisites + +Before you start, make sure you have the following installed in your Python environment: + +1. **Python**: (e.g., Python 3.8+) +2. **`uv`**: Or your preferred Python package manager/runner. +3. **`pytest`**: The testing framework. +4. **`pytest-benchmark`**: The pytest plugin for benchmarking. +5. **`orjson`**: The alternative JSON library we'll be testing against (the standard `json` library is built-in).
+ +You can install the necessary Python packages using `uv`: + +```console +uv pip install pytest pytest-benchmark orjson +``` diff --git a/docs/tutorials/benchmark.md b/docs/tutorials/benchmark.md new file mode 100644 index 0000000..b2a9a01 --- /dev/null +++ b/docs/tutorials/benchmark.md @@ -0,0 +1,200 @@ +# How to Benchmark Different Python Implementations with `pytest-benchmark` + +```{note} +Most of this text was generated with AI. +``` + +This guide will walk you through setting up and running performance benchmarks +using `pytest-benchmark`. Benchmarking is crucial for making informed decisions +about which libraries or implementation strategies offer the best performance +for your specific use cases. We'll use the common example of comparing two JSON +serialization libraries: the standard `json` and the faster `orjson`. + +## Why benchmark? + +When you have multiple ways to achieve the same task (e.g., using different +libraries or algorithms), benchmarks provide quantitative data on their +performance. This data helps you: + +- Identify performance bottlenecks. +- Choose the most efficient library/method for critical code paths. +- Track performance regressions or improvements over time. +- Justify technical decisions with concrete evidence. + +## Prerequisites + +Before you start, make sure you have the following installed in your Python environment: + +1. **Python**: (e.g., Python 3.8+) +2. **`uv`**: Or your preferred Python package manager/runner. +3. **`pytest`**: The testing framework. +4. **`pytest-benchmark`**: The pytest plugin for benchmarking. +5. **`orjson`**: The alternative JSON library we'll be testing against (the +   standard `json` library is built-in). + +You can install the necessary Python packages using `uv`: + +```console +uv pip install pytest pytest-benchmark orjson +``` + +## Setting up Your Benchmark File + +1. Create a directory for your benchmark scripts. Following your project +   structure, let's assume this is a `scripts/` directory. +2. Inside the `scripts/` directory, create a new Python file for your +   benchmarks.
For our JSON example, let's name it `test_json_performance.py`. + +   ``` +   project_root/ +   └── scripts/ +       └── test_json_performance.py +   ``` + +## Writing Benchmark Functions + +In your `test_json_performance.py` file, you'll write functions that +`pytest-benchmark` can discover and run. Each function will test a specific +piece of code. + +Here's how to structure the benchmark for comparing `json.dumps` and `orjson.dumps`: + +```python +# scripts/test_json_performance.py + +import pytest +import json +import orjson + +# Sample data to be used for serialization +SAMPLE_DATA = { +    "name": "Example User", +    "email": "user@example.com", +    "age": 30, +    "is_active": True, +    "balance": 1234.56, +    "metadata": {"key" + str(i): "value" + str(i) for i in range(50)}, +} + +# Benchmark for the standard json library's dumps function +def test_standard_json_dumps(benchmark): +    """Benchmarks the standard json.dumps() function.""" +    benchmark(json.dumps, SAMPLE_DATA) + +def test_orjson_dumps(benchmark): +    """Benchmarks the orjson.dumps() function.""" +    benchmark(orjson.dumps, SAMPLE_DATA) + + +SERIALIZED_JSON_STD = json.dumps(SAMPLE_DATA) +SERIALIZED_JSON_ORJSON = orjson.dumps(SAMPLE_DATA) + + +def test_standard_json_loads(benchmark): +    benchmark(json.loads, SERIALIZED_JSON_STD) + + +def test_orjson_loads(benchmark): +    benchmark(orjson.loads, SERIALIZED_JSON_ORJSON) + +``` + +**Key points in the code:** + +- We import `pytest` and the libraries we want to test (`json`, `orjson`). +- `SAMPLE_DATA` provides a consistent input for all benchmarks. +- Each function starting with `test_` is collected by pytest, so the benchmarks run as ordinary tests. +- The `benchmark` fixture (provided by `pytest-benchmark`) is passed as an argument to these functions. +- You call `benchmark(function_to_test, arg1, arg2, ...)` to run and measure +   the `function_to_test` with its arguments. + +## Running the Benchmarks + +To run your benchmarks, navigate to your project's root directory in the +terminal and use the command structure you've established: + +```console +uv run pytest scripts/test_json_performance.py +``` + +If you have multiple benchmark files in the `scripts/` directory, you can run them one at a time: + +```console +uv run pytest scripts/{BENCHMARK}.py +``` + +## Understanding the output + +After running, `pytest-benchmark` will produce a table summarizing the +performance results. It will look something like this (the exact numbers will +vary based on your machine): + +| Name (time in us) | Min | Max | Mean | StdDev | Median | IQR | Outliers(\*) | Rounds | Iterations | +| ----------------------------- | -------------- | -------------- | -------------- | ------------- | -------------- | ------------- | ------------ | ------ | ---------- | +| test_orjson_dumps | 3.8530 (1.0) | 6.5290 (1.0) | 4.3386 (1.0) | 0.3104 (1.0) | 4.2600 (1.0) | 0.3045 (1.0) | 64;95 | 22893 | 1 | +| test_standard_json_dumps | 19.0930 (4.96) | 31.2950 (4.80) | 20.6635 (4.76) | 1.6072 (5.18) | 20.2170 (4.75) | 1.4480 (4.75) | 72;165 | 4633 | 1 | +| test_orjson_loads | 3.3270 (1.0) | 5.8330 (1.0) | 3.6799 (1.0) | 0.3019 (1.0) | 3.6020 (1.0) | 0.2660 (1.0) | 101;111 | 26329 | 1 | +| test_standard_json_loads | 6.8310 (2.05) | 11.2870 (1.94) | 7.5088 (2.04) | 0.7889 (2.61) | 7.2790 (2.02) | 0.6900 (2.59) | 84;116 | 12691 | 1 | + +**Key columns to look at:** + +- **Name:** The name of your benchmark function. +- **Min, Max, Mean, Median:** These are timings (often in microseconds, `us`, +   or milliseconds, `ms`).
**Lower values are better.** The `Mean` or `Median` are +   often good general indicators. +- **StdDev:** Standard deviation, showing the variability of the measurements. +   Lower is generally better, indicating more consistent performance. +- **Rounds:** How many times the core benchmark loop was run by +   `pytest-benchmark` to gather statistics. +- **Iterations:** How many times your target function was called within each +   round. +- **Ops/s (or Rounds/s):** Operations per second. **Higher values are better.** +   (This column may be absent or named differently depending on configuration; +   the `Min`, `Mean`, and `Median` times are the primary metrics.) + +The numbers in parentheses (e.g., `(1.0)`, `(4.96)`) next to the metrics for +`test_orjson_dumps` show its performance relative to the baseline (the +fastest test, which is itself in this case). For +`test_standard_json_dumps`, `(4.96)` next to its `Min` time means it was +4.96 times slower than the `Min` time of the fastest test +(`test_orjson_dumps`). + +From the example output, you could conclude that `orjson` is significantly +faster than the standard `json` for both `dumps` and `loads` operations on this +particular `SAMPLE_DATA` and machine. diff --git a/docs/tutorials/custom_system.md b/docs/tutorials/custom_system.md index 2b6760d..5e06e1c 100644 --- a/docs/tutorials/custom_system.md +++ b/docs/tutorials/custom_system.md @@ -1,8 +1,9 @@ # Custom System + This tutorial describes how to create and use a custom system in a parent package. 1. Define the system. This example defines some custom attributes to illustrate serialization and -de-serialization behaviors. + de-serialization behaviors. ```python from typing import Any @@ -31,7 +32,7 @@ class CustomSystem(System): - The system's custom attribute `my_attribute` will be serialized and de-serialized automatically. - `infrasys` will call handle_data_format_upgrade during de-serialization so that this package -can handle format changes that might occur in the future. + can handle format changes that might occur in the future. 2. Define some component classes. @@ -76,11 +77,11 @@ class Generator(Component): **Notes**: - Each component defines the `example` method. This is highly recommended so that users can see -what a component might look like in the REPL. + what a component might look like in the REPL. - The `Bus` class implements a custom check when it is added to the system. It raises an exception -if its `Location` object is not already attached to the system. The same could be done for -generators and buses. + if its `Location` object is not already attached to the system. The same could be done for + generators and buses. 3. Build a system. @@ -98,7 +99,7 @@ gen = Generator(name="gen1", available=True, bus=bus, active_power=1.2, rating=1 system.add_components(location, bus, gen) time_series = SingleTimeSeries.from_array( data=[random.random() for x in range(24)], - variable_name="active_power", + name="active_power", initial_time=datetime(year=2030, month=1, day=1), resolution=timedelta(hours=1), ) diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index 7b0f80e..d82479e 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -1,6 +1,7 @@ ```{eval-rst} ..
_tutorials-page: ``` + # Tutorials ```{eval-rst} @@ -9,3 +10,5 @@ :caption: Contents: custom_system + benchmark +``` diff --git a/pyproject.toml b/pyproject.toml index 2dddf3f..246d1e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,61 +1,54 @@ [build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" +requires = ["uv_build>=0.8.22,<0.9.0"] +build-backend = "uv_build" [project] name = "infrasys" -version = "0.5.2" +version = "1.0.0rc3" -description = '' +description = "Data store for components and time series in support of Python-based modeling packages" readme = "README.md" requires-python = ">=3.11, <3.14" license = "BSD-3-Clause" -keywords = [] +keywords = [ +    "infrastructure", +    "time-series", +    "energy", +    "grid", +    "data-storage", +] authors = [ { name = "Aadil Latif", email = "aadil.latif@nrel.gov" }, { name = "Daniel Thom", email = "daniel.thom@nrel.gov" }, { name = "Kapil Duwadi", email = "kapil.duwadi@nrel.gov" }, -    { name = "Pedro Andres Sanchez Perez", email = "pedroandres.sanchezperez@nrel.gov" }, +    { name = "Pedro Andres Sanchez Perez", email = "pesap@users.noreply.github.com" }, { name = "Tarek Elgindy", email = "tarek.elgindy@nrel.gov" }, ] classifiers = [ -    "Development Status :: 4 - Beta", +    "Development Status :: 5 - Production/Stable", "Programming Language :: Python", -    "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", +    "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] dependencies = [ -    "loguru~=0.7.2", +    "h5py>=3.13,<4", +    "loguru>=0.7,<0.8", "numpy >= 2, < 3", -    "pandas >= 2, < 3", +    "orjson>=3.11.5,<4", +    "pandas>=2,<3", "pint~=0.23", -    "pyarrow~=19.0", -    "pydantic >= 2.7, < 3", -    "python-dateutil>=2.9.0.post0", -    "rich~=13.7.1", +    "pyarrow>=21,<23", +    "pydantic>=2.12,<3", +    "python-dateutil>=2.9,<3", +    "rich>=13.7,<14", ] [project.optional-dependencies] chronify = [ "chronify ~= 0.3.1", ] -dev = [ -    "autodoc_pydantic~=2.0", -    "furo", -    "mypy >=1.13, < 2", -    "myst_parser", -    "pandas-stubs", -    "pre-commit", -    "pyarrow-stubs", -    "pytest", -    "pytest-cov", -    "ruff", -    "sphinx", -    "sphinx-click", -    "sphinx-copybutton", -    "sphinx-tabs~=3.4", -] [project.urls] Documentation = "https://github.com/NREL/infrasys#readme" @@ -70,10 +63,16 @@ files = [ ] [tool.pytest.ini_options] -pythonpath = "src" -minversion = "6.0" -addopts = "-ra" +pythonpath = ["src"] testpaths = ["tests"] +addopts = [ +    "-ra", +    "--cov=infrasys", +    "--cov-report=term-missing:skip-covered", +] +filterwarnings = [ +    "ignore::duckdb_engine.DuckDBEngineWarning", +] [tool.ruff] # Exclude a variety of commonly ignored directories.
@@ -121,10 +120,29 @@ docstring-code-line-length = "dynamic" [dependency-groups] dev = [ "ipython>=9.1.0", + "pre-commit", + "pytest", + "pytest-cov", + "infrasys[chronify]", + "ruff", "types-python-dateutil>=2.9.0.20241206", "mypy >=1.13, < 2", "pandas-stubs", "pyarrow-stubs", + "pytest-missing-modules>=0.2.1", + "h5py-stubs>=0.1.2", +] +docs = [ + "autodoc_pydantic~=2.0", + "furo", + "myst_parser", + "ipython>=9.1.0", + "types-python-dateutil>=2.9.0.20241206", + "pytest-missing-modules>=0.2.1", + "sphinx", + "sphinx-click", + "sphinx-copybutton", + "sphinx-tabs~=3.4", ] [tool.ruff.lint.per-file-ignores] diff --git a/scripts/conftest.py b/scripts/conftest.py new file mode 100644 index 0000000..1380665 --- /dev/null +++ b/scripts/conftest.py @@ -0,0 +1,50 @@ +import json +import pathlib + +import pytest + + +def pytest_addoption(parser): + parser.addoption( + "--json-data", + action="store", + type=str, + default=None, + help="Path to the JSON data file for both load and dump benchmarks", + ) + + +@pytest.fixture +def json_file_path(request, tmp_path): + file_path_str = request.config.getoption("--json-data") + if file_path_str: + path = pathlib.Path(file_path_str) + if not path.exists(): + pytest.fail(f"JSON data file not found at: {path}") + return path + else: + # Create a temporary JSON file with example data if no --json-data is provided + example_data = {"name": "example", "value": 123, "items": [1, 2, 3, {"nested": True}]} + temp_file = tmp_path / "example_data.json" + with open(temp_file, "w", encoding="utf-8") as f: + json.dump(example_data, f) + print(f"Using example JSON data from: {temp_file} for both load and dump benchmarks") + return temp_file + + +@pytest.fixture +def json_data_from_file(json_file_path): + """Fixture to load data from the json_file_path for dumping benchmarks.""" + try: + with open(json_file_path, "r", encoding="utf-8") as f: + data = json.load(f) + return data + except Exception as e: + pytest.fail(f"Error loading data from {json_file_path}: {e}") + return None + + +@pytest.fixture +def json_data(): + """Fixture to provide sample JSON data for dumping tests (if needed independently).""" + return {"name": "example", "value": 123, "items": [1, 2, 3, {"nested": True}]} diff --git a/scripts/json_performance.py b/scripts/json_performance.py new file mode 100644 index 0000000..31945ab --- /dev/null +++ b/scripts/json_performance.py @@ -0,0 +1,85 @@ +""" +This script benchmarks the performance of loading and dumping JSON data +using the standard `json` library and the `orjson` library. + +It can be run using `pytest`. + +Usage: + To run with a specific JSON data file from the project folder: + ```terminal + pytest scripts/json_performance.py --json-data path/to/your/data.json + ``` + + If `--json-data` is not provided, it will use a temporary example + JSON file for benchmarking. 
+
+    To compare similar operations (e.g., dumps vs dumps or loads vs loads), run the following:
+    ```bash
+    pytest scripts/json_performance.py -k dump
+    ```
+    or
+    ```bash
+    pytest scripts/json_performance.py -k load
+    ```
+"""
+
+import json
+import pathlib
+
+import pytest
+
+orjson = pytest.importorskip("orjson", reason="orjson library not installed")
+pytest.importorskip("pytest_benchmark", reason="pytest-benchmark not installed")
+
+
+def load_with_standard_json(file_path: pathlib.Path):
+    """Loads JSON using the standard json library."""
+    with open(file_path, "r", encoding="utf-8") as f:
+        data = json.load(f)
+    return data
+
+
+def load_with_orjson(file_path: pathlib.Path):
+    """Loads JSON using the orjson library."""
+    with open(file_path, "rb") as f:
+        data = orjson.loads(f.read())
+    return data
+
+
+def dump_with_standard_json(data, target_path: pathlib.Path):
+    """Dumps data using the standard json library."""
+    with open(target_path, "w", encoding="utf-8") as f:
+        json.dump(data, f)
+
+
+def dump_with_orjson(data, target_path: pathlib.Path):
+    """Dumps data using the orjson library."""
+    dumped_data = orjson.dumps(data)
+    with open(target_path, "wb") as f:
+        f.write(dumped_data)
+
+
+@pytest.mark.parametrize(
+    "load_func",
+    [load_with_standard_json, load_with_orjson],
+    ids=["standard_json_load", "orjson_load"],
+)
+def test_json_load_performance(benchmark, load_func, json_file_path):
+    """Benchmark loading JSON from the specified file."""
+    benchmark(load_func, json_file_path)
+
+
+@pytest.mark.parametrize(
+    "dump_func, lib_name",
+    [
+        (dump_with_standard_json, "standard_json"),
+        (dump_with_orjson, "orjson"),
+    ],
+    ids=["standard_json_dump", "orjson_dump"],
+)
+def test_json_dump_performance(
+    benchmark, dump_func, lib_name, json_file_path, tmp_path, json_data_from_file
+):
+    """Benchmark dumping JSON data to a temporary file."""
+    output_file = tmp_path / f"output_{lib_name}.json"
+    benchmark(dump_func, json_data_from_file, output_file)
diff --git a/src/infrasys/__init__.py b/src/infrasys/__init__.py
index f38a191..79c44d5 100644
--- a/src/infrasys/__init__.py
+++ b/src/infrasys/__init__.py
@@ -1,33 +1,42 @@
 import importlib.metadata as metadata
+
 from loguru import logger
 
 logger.disable("infrasys")
 
 __version__ = metadata.metadata("infrasys")["Version"]
 
+TS_METADATA_FORMAT_VERSION = "1.0.0"
+
+TIME_SERIES_ASSOCIATIONS_TABLE = "time_series_associations"
+TIME_SERIES_METADATA_TABLE = "time_series_metadata"
+KEY_VALUE_STORE_TABLE = "key_value_store"
+SUPPLEMENTAL_ATTRIBUTE_ASSOCIATIONS_TABLE = "supplemental_attribute_associations"
+COMPONENT_ASSOCIATIONS_TABLE = "component_associations"
+
-from .component import Component
 from .base_quantity import BaseQuantity
+from .component import Component
 from .location import GeographicInfo, Location
 from .normalization import NormalizationModel
 from .supplemental_attribute import SupplementalAttribute
 from .system import System
 from .time_series_models import (
-    SingleTimeSeries,
+    Deterministic,
     NonSequentialTimeSeries,
-    TimeSeriesStorageType,
-    TimeSeriesKey,
+    SingleTimeSeries,
     SingleTimeSeriesKey,
+    TimeSeriesKey,
+    TimeSeriesStorageType,
 )
-
 __all__ = (
     "BaseQuantity",
     "Component",
+    "Deterministic",
     "GeographicInfo",
     "Location",
+    "NonSequentialTimeSeries",
     "NormalizationModel",
     "SingleTimeSeries",
-    "NonSequentialTimeSeries",
     "SingleTimeSeriesKey",
     "SupplementalAttribute",
     "System",
diff --git a/src/infrasys/arrow_storage.py
b/src/infrasys/arrow_storage.py index dfabdac..a14f07e 100644 --- a/src/infrasys/arrow_storage.py +++ b/src/infrasys/arrow_storage.py @@ -3,22 +3,26 @@ import atexit import shutil from datetime import datetime +from functools import singledispatchmethod from pathlib import Path from tempfile import mkdtemp from typing import Any, Optional -from functools import singledispatchmethod import numpy as np -from numpy.typing import NDArray import pyarrow as pa from loguru import logger +from numpy.typing import NDArray from infrasys.exceptions import ISNotStored from infrasys.time_series_models import ( - SingleTimeSeries, - SingleTimeSeriesMetadata, + AbstractDeterministic, + Deterministic, + DeterministicMetadata, + DeterministicTimeSeriesType, NonSequentialTimeSeries, NonSequentialTimeSeriesMetadata, + SingleTimeSeries, + SingleTimeSeriesMetadata, TimeSeriesData, TimeSeriesMetadata, TimeSeriesStorageType, @@ -51,6 +55,23 @@ def create_with_permanent_directory(cls, directory: Path) -> "ArrowTimeSeriesSto directory.mkdir(exist_ok=True) return cls(directory) + @classmethod + def deserialize( + cls, + data: dict[str, Any], + time_series_dir: Path, + dst_time_series_directory: Path | None, + read_only: bool, + **kwargs: Any, + ) -> tuple["ArrowTimeSeriesStorage", None]: + """Deserialize Arrow storage from serialized data.""" + if read_only: + storage = cls.create_with_permanent_directory(time_series_dir) + else: + storage = cls.create_with_temp_directory(base_directory=dst_time_series_directory) + storage.serialize({}, storage.get_time_series_directory(), src=time_series_dir) + return storage, None + def get_time_series_directory(self) -> Path: return self._ts_directory @@ -58,7 +79,7 @@ def add_time_series( self, metadata: TimeSeriesMetadata, time_series: TimeSeriesData, - connection: Any = None, + context: Any = None, ) -> None: self._add_time_series(time_series) @@ -99,24 +120,178 @@ def _(self, time_series): else: logger.debug("{} was already stored", time_series_uuid) + @_add_time_series.register(AbstractDeterministic) + def _(self, time_series): + """Store deterministic forecast time series data as a 2D matrix. + + Each row represents a forecast window, and each column represents a time step + in the forecast horizon. The data is stored as a single array of arrays. 
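+
+        For illustration (hypothetical values), a forecast with two windows and
+        a three-step horizon is stored as ``[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]``,
+        one inner list per window.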
+ """ + time_series_uuid = time_series.uuid + fpath = self._ts_directory.joinpath(f"{time_series_uuid}{EXTENSION}") + + if not fpath.exists(): + forecast_data = time_series.data_array + + forecast_list = forecast_data.tolist() + + schema = pa.schema([pa.field(str(time_series_uuid), pa.list_(pa.list_(pa.float64())))]) + + arrow_batch = pa.record_batch([pa.array([forecast_list])], schema=schema) + + # Write to disk + with pa.OSFile(str(fpath), "wb") as sink: # type: ignore + with pa.ipc.new_file(sink, arrow_batch.schema) as writer: + writer.write(arrow_batch) + + logger.trace("Saving deterministic time series to {}", fpath) + logger.debug("Added {} to time series storage", time_series_uuid) + else: + logger.debug("{} was already stored", time_series_uuid) + def get_time_series( self, metadata: TimeSeriesMetadata, start_time: datetime | None = None, length: int | None = None, - connection: Any = None, - ) -> Any: - if isinstance(metadata, SingleTimeSeriesMetadata): - return self._get_single_time_series( - metadata=metadata, start_time=start_time, length=length - ) + context: Any = None, + ) -> TimeSeriesData: + """Return a time series array using the appropriate handler based on metadata type.""" + return self._get_time_series_dispatch( + metadata, start_time=start_time, length=length, context=context + ) - elif isinstance(metadata, NonSequentialTimeSeriesMetadata): - return self._get_nonsequential_time_series(metadata=metadata) + @singledispatchmethod + def _get_time_series_dispatch( + self, + metadata: TimeSeriesMetadata, + start_time: datetime | None = None, + length: int | None = None, + context: Any = None, + ) -> TimeSeriesData: msg = f"Bug: need to implement get_time_series for {type(metadata)}" raise NotImplementedError(msg) - def remove_time_series(self, metadata: TimeSeriesMetadata, connection: Any = None) -> None: + @_get_time_series_dispatch.register(SingleTimeSeriesMetadata) + def _( + self, + metadata: SingleTimeSeriesMetadata, + start_time: datetime | None = None, + length: int | None = None, + context: Any = None, + ) -> SingleTimeSeries: + fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}") + with pa.memory_map(str(fpath), "r") as source: + base_ts = pa.ipc.open_file(source).get_record_batch(0) + logger.trace("Reading time series from {}", fpath) + index, length = metadata.get_range(start_time=start_time, length=length) + columns = base_ts.column_names + if len(columns) != 1: + msg = f"Bug: expected a single column: {columns=}" + raise Exception(msg) + # This should be equal to metadata.time_series_uuid in versions + # v0.2.1 or later. Earlier versions used the time series variable name. 
+ column = columns[0] + data = base_ts[column][index : index + length] + if metadata.units is not None: + np_data_array = metadata.units.quantity_type(data, metadata.units.units) + else: + np_data_array = np.array(data) + return SingleTimeSeries( + uuid=metadata.time_series_uuid, + name=metadata.name, + resolution=metadata.resolution, + initial_timestamp=start_time or metadata.initial_timestamp, + data=np_data_array, + ) + + @_get_time_series_dispatch.register(NonSequentialTimeSeriesMetadata) + def _( + self, + metadata: NonSequentialTimeSeriesMetadata, + start_time: datetime | None = None, + length: int | None = None, + context: Any = None, + ) -> NonSequentialTimeSeries: + fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}") + with pa.memory_map(str(fpath), "r") as source: + base_ts = pa.ipc.open_file(source).get_record_batch(0) + logger.trace("Reading time series from {}", fpath) + columns = base_ts.column_names + if len(columns) != 2: + msg = f"Bug: expected two columns: {columns=}" + raise Exception(msg) + data_column, timestamps_column = columns[0], columns[1] + data, timestamps = ( + base_ts[data_column], + base_ts[timestamps_column], + ) + if metadata.units is not None: + np_data_array = metadata.units.quantity_type(data, metadata.units.units) + else: + np_data_array = np.array(data) + np_time_array = np.array(timestamps).astype("O") # convert to datetime object + return NonSequentialTimeSeries( + uuid=metadata.time_series_uuid, + name=metadata.name, + data=np_data_array, + timestamps=np_time_array, + normalization=metadata.normalization, + ) + + @_get_time_series_dispatch.register(DeterministicMetadata) + def _( + self, + metadata: DeterministicMetadata, + start_time: datetime | None = None, + length: int | None = None, + context: Any = None, + ) -> DeterministicTimeSeriesType: + # Load deterministic data from file + fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}") + + if not fpath.exists(): + msg = f"No time series with {metadata.time_series_uuid} is stored" + raise ISNotStored(msg) + + # Regular Deterministic with stored 2D data - check if it's actually a nested array + with pa.memory_map(str(fpath), "r") as source: + base_ts = pa.ipc.open_file(source).get_record_batch(0) + logger.trace("Reading time series from {}", fpath) + + columns = base_ts.column_names + if len(columns) != 1: + msg = f"Bug: expected a single column: {columns=}" + raise Exception(msg) + + column = columns[0] + + # Check if this is a nested array (Deterministic) or flat array (SingleTimeSeries used for Deterministic) + data = base_ts[column] + if isinstance(data, pa.ListArray): + # Regular Deterministic with 2D data stored as nested arrays + data = data[0] # Get the nested array + if metadata.units is not None: + np_array = metadata.units.quantity_type(data, metadata.units.units) + else: + np_array = np.array(data) + + return Deterministic( + uuid=metadata.time_series_uuid, + name=metadata.name, + resolution=metadata.resolution, + initial_timestamp=metadata.initial_timestamp, + horizon=metadata.horizon, + interval=metadata.interval, + window_count=metadata.window_count, + data=np_array, + normalization=metadata.normalization, + ) + else: + msg = f"Unsupported metadata type for Deterministic: {type(metadata)}" + raise ValueError(msg) + + def remove_time_series(self, metadata: TimeSeriesMetadata, context: Any = None) -> None: fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}") if not fpath.exists(): msg = f"No time series with 
{metadata.time_series_uuid} is stored"
@@ -152,6 +327,7 @@ def _get_single_time_series(
         metadata: SingleTimeSeriesMetadata,
         start_time: datetime | None = None,
         length: int | None = None,
+        context: Any = None,
     ) -> SingleTimeSeries:
         fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}")
         with pa.memory_map(str(fpath), "r") as source:
@@ -162,57 +338,21 @@ def _get_single_time_series(
             if len(columns) != 1:
                 msg = f"Bug: expected a single column: {columns=}"
                 raise Exception(msg)
-            # This should be equal to metadata.time_series_uuid in versions
-            # v0.2.1 or later. Earlier versions used the time series variable name.
             column = columns[0]
             data = base_ts[column][index : index + length]
-        if metadata.quantity_metadata is not None:
-            np_array = metadata.quantity_metadata.quantity_type(
-                data, metadata.quantity_metadata.units
-            )
+        if metadata.units is not None:
+            np_array = metadata.units.quantity_type(data, metadata.units.units)
         else:
             np_array = np.array(data)
         return SingleTimeSeries(
             uuid=metadata.time_series_uuid,
-            variable_name=metadata.variable_name,
+            name=metadata.name,
             resolution=metadata.resolution,
-            initial_time=start_time or metadata.initial_time,
+            initial_timestamp=start_time or metadata.initial_timestamp,
             data=np_array,
             normalization=metadata.normalization,
         )
 
-    def _get_nonsequential_time_series(
-        self,
-        metadata: NonSequentialTimeSeriesMetadata,
-    ) -> NonSequentialTimeSeries:
-        fpath = self._ts_directory.joinpath(f"{metadata.time_series_uuid}{EXTENSION}")
-        with pa.memory_map(str(fpath), "r") as source:
-            base_ts = pa.ipc.open_file(source).get_record_batch(0)
-            logger.trace("Reading time series from {}", fpath)
-            columns = base_ts.column_names
-            if len(columns) != 2:
-                msg = f"Bug: expected two columns: {columns=}"
-                raise Exception(msg)
-            data_column, timestamps_column = columns[0], columns[1]
-            data, timestamps = (
-                base_ts[data_column],
-                base_ts[timestamps_column],
-            )
-        if metadata.quantity_metadata is not None:
-            np_data_array = metadata.quantity_metadata.quantity_type(
-                data, metadata.quantity_metadata.units
-            )
-        else:
-            np_data_array = np.array(data)
-        np_time_array = np.array(timestamps).astype("O")  # convert to datetime object
-        return NonSequentialTimeSeries(
-            uuid=metadata.time_series_uuid,
-            variable_name=metadata.variable_name,
-            data=np_data_array,
-            timestamps=np_time_array,
-            normalization=metadata.normalization,
-        )
-
     def _convert_to_record_batch_single_time_series(
         self, time_series_array: NDArray, column: str
     ) -> pa.RecordBatch:
diff --git a/src/infrasys/base_quantity.py b/src/infrasys/base_quantity.py
index a52fbfb..dd7abaa 100644
--- a/src/infrasys/base_quantity.py
+++ b/src/infrasys/base_quantity.py
@@ -36,7 +36,6 @@ def __get_pydantic_core_schema__(
         return core_schema.with_info_after_validator_function(
             cls._validate,
             core_schema.any_schema(),
-            field_name=handler.field_name,
             serialization=core_schema.plain_serializer_function_ser_schema(
                 cls._serialize,
                 info_arg=True,
@@ -50,15 +49,15 @@ def _validate(cls, field_value: Any, _: core_schema.ValidationInfo) -> "BaseQuan
        # The type check is more robust for verifying that this is not an instance of a bare "BaseQuantity"
         if type(field_value) is cls:
             if cls.__base_unit__:
-                assert field_value.check(
cls.__base_unit__ - ), f"Unit must be compatible with {cls.__base_unit__}" + assert field_value.check(cls.__base_unit__), ( + f"Unit must be compatible with {cls.__base_unit__}" + ) return cls(field_value.magnitude, field_value.units) return cls(field_value, cls.__base_unit__) diff --git a/src/infrasys/chronify_time_series_storage.py b/src/infrasys/chronify_time_series_storage.py index 98a8fbc..f0e40f3 100644 --- a/src/infrasys/chronify_time_series_storage.py +++ b/src/infrasys/chronify_time_series_storage.py @@ -6,9 +6,10 @@ from functools import singledispatch from pathlib import Path from tempfile import NamedTemporaryFile -from typing import Any, Generator, Self +from typing import Any, Generator, Literal, Self from uuid import UUID +import numpy as np import pandas as pd import pint from chronify import DatetimeRange, Store, TableSchema @@ -18,6 +19,9 @@ from infrasys.exceptions import ISFileExists, ISInvalidParameter from infrasys.id_manager import IDManager from infrasys.time_series_models import ( + Deterministic, + DeterministicMetadata, + DeterministicTimeSeriesType, SingleTimeSeries, SingleTimeSeriesKey, SingleTimeSeriesMetadata, @@ -29,7 +33,6 @@ from infrasys.time_series_storage_base import TimeSeriesStorageBase from infrasys.utils.path_utils import delete_if_exists - _SINGLE_TIME_SERIES_BASE_NAME = "single_time_series" _TIME_SERIES_FILENAME = "time_series_data.db" @@ -111,6 +114,33 @@ def from_file(cls, data: dict[str, Any], read_only: bool = False) -> Self: store = Store(engine_name=data["engine_name"], file_path=Path(data["filename"])) return cls(store, id_manager, read_only=read_only, uuid_lookup=uuid_lookup) + @classmethod + def deserialize( + cls, + data: dict[str, Any], + time_series_dir: Path, + dst_time_series_directory: Path | None, + read_only: bool, + **kwargs: Any, + ) -> tuple["ChronifyTimeSeriesStorage", None]: + """Deserialize Chronify storage from serialized data.""" + # Update the filename in data to point to the extracted location + # data["filename"] contains an absolute path from the original save location + # We need to replace it with the path in the extracted directory + orig_filename = Path(data["filename"]) + extracted_filename = time_series_dir / orig_filename.name + data["filename"] = str(extracted_filename) + + if read_only: + storage = cls.from_file(data, read_only=True) + else: + storage = cls.from_file_to_tmp_file( + data, + dst_dir=dst_time_series_directory, + read_only=read_only, + ) + return storage, None + @staticmethod def _deserialize_ids(data: dict[str, Any]) -> tuple[IDManager, dict[UUID, int]]: uuid_lookup: dict[UUID, int] = {} @@ -136,7 +166,7 @@ def add_time_series( self, metadata: TimeSeriesMetadata, time_series: TimeSeriesData, - connection: Connection | None = None, + context: Connection | None = None, ) -> None: if not isinstance(time_series, SingleTimeSeries): msg = f"Bug: need to implement add_time_series for {type(time_series)}" @@ -151,13 +181,13 @@ def add_time_series( schema = _make_table_schema(time_series, _get_table_name(time_series)) # There is no reason to run time checks because we are generating the timestamps # from initial_time, resolution, and length, so they are guaranteed to be correct. 
-        self._store.ingest_table(df, schema, connection=connection, skip_time_checks=False)
+        self._store.ingest_table(df, schema, connection=context, skip_time_checks=False)
         self._uuid_lookup[time_series.uuid] = db_id
         logger.debug("Added {} to time series storage", time_series.summary)
 
-    def check_timestamps(self, key: TimeSeriesKey, connection: Connection | None = None) -> None:
+    def check_timestamps(self, key: TimeSeriesKey, context: Connection | None = None) -> None:
         table_name = _get_table_name(key)
-        self._store.check_timestamps(table_name, connection=connection)
+        self._store.check_timestamps(table_name, connection=context)
 
     def get_engine_name(self) -> str:
         """Return the name of the underlying database engine."""
@@ -168,25 +198,36 @@ def get_time_series(
         metadata: TimeSeriesMetadata,
         start_time: datetime | None = None,
         length: int | None = None,
-        connection: Connection | None = None,
+        context: Connection | None = None,
     ) -> Any:
         if isinstance(metadata, SingleTimeSeriesMetadata):
             return self._get_single_time_series(
                 metadata=metadata,
                 start_time=start_time,
                 length=length,
-                connection=connection,
+                context=context,
+            )
+        elif isinstance(metadata, DeterministicMetadata):
+            # For DeterministicMetadata, we need to check if it's a regular Deterministic
+            # or a DeterministicSingleTimeSeries. We do this by checking the data structure.
+            # Since chronify doesn't easily support checking if data is 2D vs 1D without loading,
+            # we'll load and check the data structure.
+            return self._get_deterministic(
+                metadata=metadata,
+                start_time=start_time,
+                length=length,
+                context=context,
             )
         msg = f"Bug: need to implement get_time_series for {type(metadata)}"
         raise NotImplementedError(msg)
 
     def remove_time_series(
-        self, metadata: TimeSeriesMetadata, connection: Connection | None = None
+        self, metadata: TimeSeriesMetadata, context: Connection | None = None
     ) -> None:
         db_id = self._get_db_id(metadata.time_series_uuid)
         table_name = _get_table_name(metadata)
-        num_deleted = self._store.delete_rows(table_name, {"id": db_id}, connection=connection)
+        num_deleted = self._store.delete_rows(table_name, {"id": db_id}, connection=context)
         if num_deleted < 1:
             msg = f"Failed to delete rows in the chronify database for {metadata.time_series_uuid}"
             raise ISInvalidParameter(msg)
@@ -208,7 +249,7 @@ def _get_single_time_series(
         metadata: SingleTimeSeriesMetadata,
         start_time: datetime | None = None,
         length: int | None = None,
-        connection: Connection | None = None,
+        context: Connection | None = None,
     ) -> SingleTimeSeries:
         table_name = _get_table_name(metadata)
         db_id = self._get_db_id(metadata.time_series_uuid)
@@ -231,29 +272,93 @@ def _get_single_time_series(
             table_name,
             query,
             params=tuple(params),
-            connection=connection,
+            connection=context,
         )
         if len(df) != required_len:
             msg = f"Bug: {len(df)=} {length=} {required_len=}"
             raise Exception(msg)
         values = df["value"].values
-        if metadata.quantity_metadata is not None:
-            np_array = metadata.quantity_metadata.quantity_type(
-                values, metadata.quantity_metadata.units
-            )
+        if metadata.units is not None:
+            np_array = metadata.units.quantity_type(values, metadata.units.units)
         else:
             np_array = values
         return SingleTimeSeries(
             uuid=metadata.time_series_uuid,
-            variable_name=metadata.variable_name,
+            name=metadata.name,
             resolution=metadata.resolution,
-            initial_time=start_time or metadata.initial_time,
+            initial_timestamp=start_time or metadata.initial_timestamp,
             data=np_array,
             normalization=metadata.normalization,
         )
 
+    def _get_deterministic(
+        self,
+        metadata:
DeterministicMetadata, + start_time: datetime | None = None, + length: int | None = None, + context: Connection | None = None, + ) -> DeterministicTimeSeriesType: + """Get Deterministic data - either regular or from SingleTimeSeries. + + This method checks if the data is stored as a regular Deterministic (not implemented) + or as a DeterministicSingleTimeSeries (references SingleTimeSeries). + For now, we assume it's always DeterministicSingleTimeSeries in chronify. + """ + # Load the referenced SingleTimeSeries data using time_series_uuid + table_name = f"{_SINGLE_TIME_SERIES_BASE_NAME}_{metadata.time_series_uuid}" + db_id = self._get_db_id(metadata.time_series_uuid) + + query = f""" + SELECT timestamp, value + FROM {table_name} + WHERE id = ? + ORDER BY timestamp ASC + """ + df = self._store.query(query, params=[db_id], connection=context) # type: ignore + + if df.empty: + msg = f"No SingleTimeSeries with {metadata.time_series_uuid} is stored" + from infrasys.exceptions import ISNotStored + + raise ISNotStored(msg) + + single_ts_data = df["value"].to_numpy() + + if metadata.units is not None: + np_data_array = metadata.units.quantity_type(single_ts_data, metadata.units.units) + else: + np_data_array = single_ts_data + + horizon_steps = int(metadata.horizon / metadata.resolution) + interval_steps = int(metadata.interval / metadata.resolution) + + forecast_matrix = np.zeros((metadata.window_count, horizon_steps)) + + for window_idx in range(metadata.window_count): + start_idx = window_idx * interval_steps + end_idx = start_idx + horizon_steps + forecast_matrix[window_idx, :] = np_data_array[start_idx:end_idx] + + # If original data was a pint.Quantity, wrap the result + if metadata.units is not None: + forecast_matrix = metadata.units.quantity_type(forecast_matrix, metadata.units.units) + + return Deterministic( + uuid=metadata.time_series_uuid, + name=metadata.name, + resolution=metadata.resolution, + initial_timestamp=metadata.initial_timestamp, + horizon=metadata.horizon, + interval=metadata.interval, + window_count=metadata.window_count, + data=forecast_matrix, + normalization=metadata.normalization, + ) + @contextmanager - def open_time_series_store(self) -> Generator[Connection, None, None]: + def open_time_series_store( + self, mode: Literal["r", "r+", "a", "w", "w-"] = "a" + ) -> Generator[Connection, None, None]: with self._store.engine.begin() as conn: yield conn @@ -273,6 +378,13 @@ def _get_db_id(self, time_series_uuid: UUID) -> int: raise Exception(msg) return db_id + def close(self) -> None: + """Dispose of the underlying store/engine.""" + try: + self._store.dispose() + except Exception: + logger.debug("Error disposing chronify store", exc_info=True) + @singledispatch def _get_table_name(time_series) -> str: @@ -283,31 +395,31 @@ def _get_table_name(time_series) -> str: @_get_table_name.register(SingleTimeSeries) def _(time_series) -> str: return _get_single_time_series_table_name( - time_series.initial_time, time_series.resolution, time_series.length + time_series.initial_timestamp, time_series.resolution, time_series.length ) @_get_table_name.register(SingleTimeSeriesMetadata) def _(metadata) -> str: return _get_single_time_series_table_name( - metadata.initial_time, metadata.resolution, metadata.length + metadata.initial_timestamp, metadata.resolution, metadata.length ) @_get_table_name.register(SingleTimeSeriesKey) def _(key) -> str: - return _get_single_time_series_table_name(key.initial_time, key.resolution, key.length) + return 
_get_single_time_series_table_name(key.initial_timestamp, key.resolution, key.length) def _get_single_time_series_table_name( - initial_time: datetime, + initial_timestamp: datetime, resolution: timedelta, length: int, ) -> str: return "_".join( ( _SINGLE_TIME_SERIES_BASE_NAME, - initial_time.isoformat().replace("-", "_").replace(":", "_"), + initial_timestamp.isoformat().replace("-", "_").replace(":", "_"), str(resolution.seconds), str(length), ) @@ -334,7 +446,7 @@ def _make_time_config(time_series) -> Any: @_make_time_config.register(SingleTimeSeries) def _(time_series: SingleTimeSeries) -> DatetimeRange: return DatetimeRange( - start=time_series.initial_time, + start=time_series.initial_timestamp, resolution=time_series.resolution, length=len(time_series.data), time_column="timestamp", diff --git a/src/infrasys/component.py b/src/infrasys/component.py index 0199e1b..6344853 100644 --- a/src/infrasys/component.py +++ b/src/infrasys/component.py @@ -31,7 +31,7 @@ def check_component_addition(self) -> None: def model_dump_custom(self, *args, **kwargs) -> dict[str, Any]: """Custom serialization for this package""" refs = {} - for x in self.model_fields: + for x in type(self).model_fields: val = self._model_dump_field(x) if val is not None: refs[x] = val @@ -50,8 +50,8 @@ def _model_dump_field(self, field) -> Any: val = [{TYPE_METADATA: serialize_component_reference(x)} for x in val] elif isinstance(val, BaseQuantity | pint.Quantity): data = val.to_dict() - data[TYPE_METADATA] = SerializedTypeMetadata( - fields=SerializedQuantityType( + data[TYPE_METADATA] = SerializedTypeMetadata.validate_python( + SerializedQuantityType( module=val.__module__, type=val.__class__.__name__, ), @@ -69,8 +69,8 @@ def pprint(self): def serialize_component_reference(component: Component) -> dict[str, Any]: """Make a JSON serializable reference to a component.""" - return SerializedTypeMetadata( - fields=SerializedComponentReference( + return SerializedTypeMetadata.validate_python( + SerializedComponentReference( module=component.__module__, type=component.__class__.__name__, uuid=component.uuid, diff --git a/src/infrasys/component_associations.py b/src/infrasys/component_associations.py index a2eb53d..fac0756 100644 --- a/src/infrasys/component_associations.py +++ b/src/infrasys/component_associations.py @@ -1,47 +1,24 @@ -import sqlite3 from typing import Optional, Type from uuid import UUID from loguru import logger -from infrasys.component import Component +from infrasys import COMPONENT_ASSOCIATIONS_TABLE, Component from infrasys.utils.classes import get_all_concrete_subclasses -from infrasys.utils.sqlite import execute +from infrasys.utils.metadata_utils import create_component_associations_table +from infrasys.utils.sqlite import create_in_memory_db, execute class ComponentAssociations: """Stores associations between components. Allows callers to quickly find components composed by other components, such as the generator to which a bus is connected.""" - TABLE_NAME = "component_associations" - def __init__(self) -> None: # This uses a different database because it is not persisted when the system # is saved to files. It will be rebuilt during de-serialization. 
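+        # NOTE: create_in_memory_db presumably wraps sqlite3.connect; the
+        # associations table schema now lives in infrasys.utils.metadata_utils.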
- self._con = sqlite3.connect(":memory:") - self._create_metadata_table() - - def _create_metadata_table(self): - schema = [ - "id INTEGER PRIMARY KEY", - "component_uuid TEXT", - "component_type TEXT", - "attached_component_uuid TEXT", - "attached_component_type TEXT", - ] - schema_text = ",".join(schema) - cur = self._con.cursor() - execute(cur, f"CREATE TABLE {self.TABLE_NAME}({schema_text})") - execute( - cur, - f"CREATE INDEX by_c_uuid ON {self.TABLE_NAME}(component_uuid)", - ) - execute( - cur, - f"CREATE INDEX by_a_uuid ON {self.TABLE_NAME}(attached_component_uuid)", - ) - self._con.commit() - logger.debug("Created in-memory component associations table") + self._con = create_in_memory_db(":memory:") + self._closed = False + create_component_associations_table(self._con, table_name=COMPONENT_ASSOCIATIONS_TABLE) def add(self, *components: Component): """Store an association between each component and directly attached subcomponents. @@ -65,7 +42,7 @@ def add(self, *components: Component): def clear(self) -> None: """Clear all component associations.""" - execute(self._con.cursor(), f"DELETE FROM {self.TABLE_NAME}") + execute(self._con.cursor(), f"DELETE FROM {COMPONENT_ASSOCIATIONS_TABLE}") logger.info("Cleared all component associations.") def list_child_components( @@ -80,7 +57,9 @@ def list_child_components( res = _make_params_and_where_clause(component_type, "attached_component_type") params.extend(res[0]) where_clause += res[1] - query = f"SELECT attached_component_uuid FROM {self.TABLE_NAME} {where_clause}" + query = ( + f"SELECT attached_component_uuid FROM {COMPONENT_ASSOCIATIONS_TABLE} {where_clause}" + ) cur = self._con.cursor() return [UUID(x[0]) for x in execute(cur, query, params)] @@ -96,7 +75,7 @@ def list_parent_components( res = _make_params_and_where_clause(component_type, "component_type") params.extend(res[0]) where_clause += res[1] - query = f"SELECT component_uuid FROM {self.TABLE_NAME} {where_clause}" + query = f"SELECT component_uuid FROM {COMPONENT_ASSOCIATIONS_TABLE} {where_clause}" cur = self._con.cursor() return [UUID(x[0]) for x in execute(cur, query, params)] @@ -104,17 +83,26 @@ def remove(self, component: Component) -> None: """Delete all rows with this component.""" query = f""" DELETE - FROM {self.TABLE_NAME} + FROM {COMPONENT_ASSOCIATIONS_TABLE} WHERE component_uuid = ? OR attached_component_uuid = ? 
""" params = [str(component.uuid), str(component.uuid)] execute(self._con.cursor(), query, params) logger.debug("Removed all associations with component {}", component.label) + def close(self) -> None: + """Close the backing SQLite connection.""" + if self._closed: + return + try: + self._con.close() + finally: + self._closed = True + def _insert_rows(self, rows: list[tuple]) -> None: cur = self._con.cursor() placeholder = ",".join(["?"] * len(rows[0])) - query = f"INSERT INTO {self.TABLE_NAME} VALUES({placeholder})" + query = f"INSERT INTO {COMPONENT_ASSOCIATIONS_TABLE} VALUES({placeholder})" try: cur.executemany(query, rows) finally: diff --git a/src/infrasys/component_manager.py b/src/infrasys/component_manager.py index cf6b45c..347171c 100644 --- a/src/infrasys/component_manager.py +++ b/src/infrasys/component_manager.py @@ -400,6 +400,13 @@ def _handle_composed_component(self, component: Component) -> None: ) raise ISOperationNotAllowed(msg) + def close(self) -> None: + """Release resources held by the component manager.""" + try: + self._associations.close() + except Exception: + logger.debug("Error closing component associations", exc_info=True) + def raise_if_attached(self, component: Component): """Raise an exception if this component is attached to a system.""" if component.uuid in self._components_by_uuid: diff --git a/src/infrasys/h5_time_series_storage.py b/src/infrasys/h5_time_series_storage.py new file mode 100644 index 0000000..4dffc9b --- /dev/null +++ b/src/infrasys/h5_time_series_storage.py @@ -0,0 +1,494 @@ +import functools +import shutil +import sqlite3 +import tempfile +from contextlib import contextmanager +from datetime import datetime +from functools import singledispatchmethod +from pathlib import Path +from typing import Any, Generator, Literal, Optional + +import h5py +from loguru import logger + +from infrasys.exceptions import ISNotStored +from infrasys.time_series_models import ( + Deterministic, + DeterministicMetadata, + DeterministicTimeSeriesType, + SingleTimeSeries, + SingleTimeSeriesMetadata, + TimeSeriesData, + TimeSeriesMetadata, + TimeSeriesStorageType, +) +from infrasys.time_series_storage_base import TimeSeriesStorageBase +from infrasys.utils.h5_utils import copy_h5_group, extract_h5_dataset_to_bytes, open_h5_file +from infrasys.utils.sqlite import create_in_memory_db + +from .time_series_metadata_store import TimeSeriesMetadataStore + +TIME_SERIES_DATA_FORMAT_VERSION = "1.0.0" +TIME_SERIES_VERSION_KEY = "data_format_version" + + +def file_handle(func): + """Decorator to ensure a valid HDF5 file handle (context) is available.""" + + @functools.wraps(func) + def wrapper(self, *args, **kwargs): + context = kwargs.pop("context", None) + if context is not None: + return func(self, *args, context=context, **kwargs) + else: + with self.open_time_series_store() as file_handle: + return func(self, *args, context=file_handle, **kwargs) + + return wrapper + + +class HDF5TimeSeriesStorage(TimeSeriesStorageBase): + """Stores time series in an h5 file.""" + + STORAGE_FILE = "time_series_storage.h5" + HDF5_TS_ROOT_PATH = "time_series" + HDF5_TS_METADATA_ROOT_PATH = "time_series_metadata" + + def __init__( + self, + directory: Path, + **kwargs, + ) -> None: + """Initialize the HDF5 time series storage. 
+
+        Parameters
+        ----------
+        directory : Path
+            Directory to store the HDF5 file
+        """
+        self.directory = directory
+        self._fpath = self.directory / self.STORAGE_FILE
+        self._file_handle = None
+        self._check_root()
+
+    @classmethod
+    def deserialize(
+        cls,
+        data: dict[str, Any],
+        time_series_dir: Path,
+        dst_time_series_directory: Path | None,
+        read_only: bool,
+        **kwargs: Any,
+    ) -> tuple["HDF5TimeSeriesStorage", "TimeSeriesMetadataStore"]:
+        """Deserialize HDF5 storage from serialized data."""
+
+        # Copy the HDF5 file to a temporary or permanent location before the
+        # temp directory is cleaned up
+        if dst_time_series_directory is not None:
+            dst_dir = dst_time_series_directory
+            dst_dir.mkdir(parents=True, exist_ok=True)
+        else:
+            dst_dir = Path(tempfile.mkdtemp())
+
+        src_h5_file = time_series_dir / cls.STORAGE_FILE
+        dst_h5_file = dst_dir / cls.STORAGE_FILE
+
+        if src_h5_file.exists():
+            shutil.copy2(src_h5_file, dst_h5_file)
+
+        logger.debug("Copied HDF5 file from {} to {}", src_h5_file, dst_h5_file)
+
+        storage = cls(directory=dst_dir, **kwargs)
+        metadata_store = TimeSeriesMetadataStore(storage.get_metadata_store(), initialize=False)
+        return storage, metadata_store
+
+    @contextmanager
+    def open_time_series_store(
+        self, mode: Literal["r", "r+", "a", "w", "w-"] = "a"
+    ) -> Generator[h5py.File, None, None]:
+        assert self._fpath
+        self._file_handle = None
+
+        # H5PY ensures closing of the file after the with statement.
+        with open_h5_file(self._fpath, mode=mode) as file_handle:
+            yield file_handle
+
+    def get_time_series_directory(self) -> Path:
+        return self.directory
+
+    def _check_root(self) -> None:
+        """Check that the root groups exist in the HDF5 file."""
+        with self.open_time_series_store() as file_handle:
+            if self.HDF5_TS_ROOT_PATH not in file_handle:
+                root = file_handle.create_group(self.HDF5_TS_ROOT_PATH)
+                root.attrs[TIME_SERIES_VERSION_KEY] = TIME_SERIES_DATA_FORMAT_VERSION
+
+            if self.HDF5_TS_METADATA_ROOT_PATH not in file_handle:
+                file_handle.create_group(self.HDF5_TS_METADATA_ROOT_PATH)
+        return
+
+    def _serialize_compression_settings(self, compression_level: int = 5) -> None:
+        """Add default compression settings."""
+        with self.open_time_series_store() as file_handle:
+            root = file_handle[self.HDF5_TS_ROOT_PATH]
+            root.attrs["compression_enabled"] = False
+            root.attrs["compression_type"] = "DEFLATE"
+            root.attrs["compression_level"] = compression_level
+            root.attrs["compression_shuffle"] = True
+        return None
+
+    @staticmethod
+    def add_serialized_data(data: dict[str, Any]) -> None:
+        """Add metadata to indicate the storage type.
+
+        Parameters
+        ----------
+        data : dict[str, Any]
+            Metadata dictionary to which the storage type will be added
+
+        Notes
+        -----
+        This method adds a key `time_series_storage_type` with the value
+        corresponding to the storage type `HDF5` to the metadata dictionary.
+        """
+        data["time_series_storage_type"] = str(TimeSeriesStorageType.HDF5)
+
+    def add_time_series(
+        self,
+        metadata: TimeSeriesMetadata,
+        time_series: TimeSeriesData,
+        context: Any = None,
+        compression_level: int = 5,
+    ) -> None:
+        """Store a time series array.
+
+        Parameters
+        ----------
+        metadata : infrasys.time_series_models.TimeSeriesMetadata
+            Metadata for the time series
+        time_series : infrasys.time_series_models.TimeSeriesData
+            Time series data to store
+        context : Any, optional
+            Optional context parameter, by default None
+        compression_level : int, defaults to 5
+            Optional compression level for `gzip` (0 for no compression, 9 for
+            maximum compression)
+
+        See Also
+        --------
+        _add_time_series_dispatch : Dispatches the call to the correct handler based on metadata type.
+        """
+        if context is not None:
+            self._add_time_series_dispatch(
+                metadata, time_series, context=context, compression_level=compression_level
+            )
+        else:
+            with self.open_time_series_store() as file_handle:
+                self._add_time_series_dispatch(
+                    metadata, time_series, context=file_handle, compression_level=compression_level
+                )
+
+    @singledispatchmethod
+    def _add_time_series_dispatch(
+        self,
+        metadata: TimeSeriesMetadata,
+        time_series: TimeSeriesData,
+        context: Any = None,
+        compression_level: int = 5,
+    ) -> None:
+        """Dispatches the call to the correct handler based on metadata type.
+
+        Parameters
+        ----------
+        metadata : infrasys.time_series_models.TimeSeriesMetadata
+            Metadata for the time series
+        time_series : infrasys.time_series_models.TimeSeriesData
+            Time series data to store
+        context : Any, optional
+            Optional context parameter, by default None
+        compression_level : int, defaults to 5
+            Optional compression level for `gzip` (0 for no compression, 9 for
+            maximum compression)
+
+        Raises
+        ------
+        NotImplementedError
+            If no handler is implemented for the given metadata type
+        """
+        msg = f"Bug: need to implement add_time_series for {type(metadata)}"
+        raise NotImplementedError(msg)
+
+    @_add_time_series_dispatch.register(SingleTimeSeriesMetadata)
+    def _(
+        self,
+        metadata: SingleTimeSeriesMetadata,
+        time_series: SingleTimeSeries,
+        context: Any = None,
+        compression_level: int = 5,
+        **kwargs: Any,
+    ) -> None:
+        """Store a SingleTimeSeries array.
+
+        Parameters
+        ----------
+        metadata : infrasys.time_series_models.SingleTimeSeriesMetadata
+            Metadata for the single time series
+        time_series : infrasys.time_series_models.SingleTimeSeries
+            Single time series data to store
+        context : Any
+            HDF5 file handle
+        compression_level : int, defaults to 5
+            Optional compression level for `gzip` (0 for no compression, 9 for
+            maximum compression)
+
+        See Also
+        --------
+        add_time_series : Public method for adding time series.
+        """
+        assert context is not None
+
+        root = context[self.HDF5_TS_ROOT_PATH]
+        uuid = str(metadata.time_series_uuid)
+
+        if uuid not in root:
+            group = root.create_group(uuid)
+
+            group.create_dataset(
+                "data", data=time_series.data_array, compression=compression_level
+            )
+
+            group.attrs["type"] = metadata.type
+            group.attrs["initial_timestamp"] = metadata.initial_timestamp.isoformat()
+            group.attrs["resolution"] = metadata.resolution.total_seconds()
+
+            # NOTE: This was added for compatibility with
+            # InfrastructureSystems. In reality, this should not affect any
+            # other implementation
+            group.attrs["module"] = "InfrastructureSystems"
+            group.attrs["data_type"] = "Float64"
+
+    @_add_time_series_dispatch.register(DeterministicMetadata)
+    def _(
+        self,
+        metadata: DeterministicMetadata,
+        time_series: Deterministic,
+        context: Any = None,
+        compression_level: int = 5,
+        **kwargs: Any,
+    ) -> None:
+        """Store a Deterministic array.
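+
+        The forecast is written as a single 2D dataset (one row per window,
+        one column per horizon step) under the time series UUID group.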
+
+        Parameters
+        ----------
+        metadata : infrasys.time_series_models.DeterministicMetadata
+            Metadata for the deterministic time series
+        time_series : infrasys.time_series_models.Deterministic
+            Deterministic time series data to store
+        context : Any
+            HDF5 file handle
+        compression_level : int, defaults to 5
+            Optional compression level for `gzip` (0 for no compression, 9 for
+            maximum compression)
+
+        See Also
+        --------
+        add_time_series : Public method for adding time series.
+        """
+        assert context is not None
+
+        root = context[self.HDF5_TS_ROOT_PATH]
+        uuid = str(metadata.time_series_uuid)
+
+        if uuid not in root:
+            group = root.create_group(uuid)
+
+            group.create_dataset(
+                "data", data=time_series.data_array, compression=compression_level
+            )
+
+            group.attrs["type"] = metadata.type
+            group.attrs["initial_timestamp"] = metadata.initial_timestamp.isoformat()
+            group.attrs["resolution"] = metadata.resolution.total_seconds()
+            group.attrs["horizon"] = metadata.horizon.total_seconds()
+            group.attrs["interval"] = metadata.interval.total_seconds()
+            group.attrs["window_count"] = metadata.window_count
+
+    def get_metadata_store(self) -> sqlite3.Connection:
+        """Get the metadata store.
+
+        Returns
+        -------
+        sqlite3.Connection
+            In-memory SQLite connection containing the time series metadata
+        """
+        with self.open_time_series_store() as file_handle:
+            ts_metadata = extract_h5_dataset_to_bytes(file_handle, self.HDF5_TS_METADATA_ROOT_PATH)
+            conn = create_in_memory_db(":memory:")
+            with tempfile.NamedTemporaryFile(delete=False) as tmp:
+                temp_file_path = tmp.name
+                tmp.write(ts_metadata)
+            backup_conn = sqlite3.connect(temp_file_path)
+            with conn:
+                backup_conn.backup(conn)
+            backup_conn.close()
+            return conn
+
+    def get_time_series(
+        self,
+        metadata: TimeSeriesMetadata,
+        start_time: Optional[datetime] = None,
+        length: Optional[int] = None,
+        context: Any = None,
+    ) -> TimeSeriesData:
+        """Return a time series array using the appropriate handler based on metadata type."""
+        if context is not None:
+            return self._get_time_series_dispatch(metadata, start_time, length, context=context)
+        else:
+            with self.open_time_series_store() as file_handle:
+                return self._get_time_series_dispatch(
+                    metadata, start_time, length, context=file_handle
+                )
+
+    @singledispatchmethod
+    def _get_time_series_dispatch(
+        self,
+        metadata: TimeSeriesMetadata,
+        start_time: Optional[datetime] = None,
+        length: Optional[int] = None,
+        context: Any = None,
+    ) -> TimeSeriesData:
+        msg = f"Bug: need to implement get_time_series for {type(metadata)}"
+        raise NotImplementedError(msg)
+
+    @_get_time_series_dispatch.register(SingleTimeSeriesMetadata)
+    def _(
+        self,
+        metadata: SingleTimeSeriesMetadata,
+        start_time: Optional[datetime] = None,
+        length: Optional[int] = None,
+        context: Any = None,
+    ) -> SingleTimeSeries:
+        """Return a SingleTimeSeries array."""
+        assert context is not None
+
+        root = context[self.HDF5_TS_ROOT_PATH]
+        uuid = str(metadata.time_series_uuid)
+
+        if uuid not in root:
+            msg = f"Time series with {uuid=} not found"
+            raise ISNotStored(msg)
+
+        dataset = root[uuid]["data"]
+
+        index, length = metadata.get_range(start_time=start_time, length=length)
+        data = dataset[index : index + length]
+        if metadata.units is not None:
+            data = metadata.units.quantity_type(data, metadata.units.units)
+        return SingleTimeSeries(
+            uuid=metadata.time_series_uuid,
+            name=metadata.name,
+            resolution=metadata.resolution,
+            initial_timestamp=start_time or metadata.initial_timestamp,
+            data=data,
+            normalization=metadata.normalization,
+        )
+
+    @_get_time_series_dispatch.register(DeterministicMetadata)
+    def _(
+        self,
+        metadata: DeterministicMetadata,
+        start_time: Optional[datetime] = None,
+        length: Optional[int] = None,
+        context: Any = None,
+    ) -> DeterministicTimeSeriesType:
+        """Return a Deterministic time series array."""
+        assert context is not None
+
+        root = context[self.HDF5_TS_ROOT_PATH]
+        uuid = str(metadata.time_series_uuid)
+
+        if uuid not in root:
+            msg = f"Time series with {uuid=} not found"
+            raise ISNotStored(msg)
+
+        dataset = root[uuid]["data"]
+        data = dataset[:]
+
+        # Regular Deterministic with stored multidimensional data
+        if data.ndim == 1:
+            msg = "Single-dimensional data is not supported for Deterministic time series"
+            raise ValueError(msg)
+
+        if metadata.units is not None:
+            data = metadata.units.quantity_type(data, metadata.units.units)
+
+        return Deterministic(
+            uuid=metadata.time_series_uuid,
+            name=metadata.name,
+            resolution=metadata.resolution,
+            initial_timestamp=metadata.initial_timestamp,
+            horizon=metadata.horizon,
+            interval=metadata.interval,
+            window_count=metadata.window_count,
+            data=data,
+            normalization=metadata.normalization,
+        )
+
+    @file_handle
+    def remove_time_series(self, metadata: TimeSeriesMetadata, context: Any = None) -> None:
+        """Remove a time series array.
+
+        Parameters
+        ----------
+        metadata : infrasys.time_series_models.TimeSeriesMetadata
+            Metadata for the time series to remove.
+        context : Any, optional
+            Optional HDF5 file handle; if not provided, one is opened.
+
+        Raises
+        ------
+        ISNotStored
+            If the time series with the specified UUID doesn't exist.
+        """
+        root = context[self.HDF5_TS_ROOT_PATH]
+        uuid = str(metadata.time_series_uuid)
+        if uuid not in root:
+            msg = f"Time series with {uuid=} not found"
+            raise ISNotStored(msg)
+        del root[uuid]
+        meta_group = context[self.HDF5_TS_METADATA_ROOT_PATH]
+        if uuid in meta_group:
+            del meta_group[uuid]
+
+    def serialize(
+        self, data: dict[str, Any], dst: Path | str, src: Optional[Path | str] = None
+    ) -> None:
+        """Serialize all time series to the destination directory.
+
+        Parameters
+        ----------
+        data : dict[str, Any]
+            Serialization metadata; this method records the storage file path in it
+        dst : Path or str
+            Destination directory or file path
+        src : Path or str, optional
+            Optional source directory or file path
+
+        Notes
+        -----
+        This implementation copies the entire time series storage directory to the destination.
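+        Any pre-existing time series metadata group in the destination file is
+        removed.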
+ """ + dst_path = Path(dst) / self.STORAGE_FILE if Path(dst).is_dir() else Path(dst) + self.output_file = dst_path + self._serialize_compression_settings() + with self.open_time_series_store() as f: + with open_h5_file(dst_path, mode="a") as dst_file: + if self.HDF5_TS_ROOT_PATH in f: + src_group = f[self.HDF5_TS_ROOT_PATH] + if isinstance(src_group, h5py.Group): + if self.HDF5_TS_ROOT_PATH in dst_file: + del dst_file[self.HDF5_TS_ROOT_PATH] + dst_group = dst_file.create_group(self.HDF5_TS_ROOT_PATH) + copy_h5_group(src_group, dst_group) + if self.HDF5_TS_METADATA_ROOT_PATH in dst_file: + del dst_file[self.HDF5_TS_METADATA_ROOT_PATH] + data["time_series_storage_file"] = str(dst_path) + self.add_serialized_data(data) diff --git a/src/infrasys/in_memory_time_series_storage.py b/src/infrasys/in_memory_time_series_storage.py index a777464..5338808 100644 --- a/src/infrasys/in_memory_time_series_storage.py +++ b/src/infrasys/in_memory_time_series_storage.py @@ -3,17 +3,18 @@ from datetime import datetime from pathlib import Path from typing import Any, TypeAlias - -from numpy.typing import NDArray from uuid import UUID + from loguru import logger +from numpy.typing import NDArray from infrasys.exceptions import ISNotStored from infrasys.time_series_models import ( - SingleTimeSeries, - SingleTimeSeriesMetadata, + DeterministicMetadata, NonSequentialTimeSeries, NonSequentialTimeSeriesMetadata, + SingleTimeSeries, + SingleTimeSeriesMetadata, TimeSeriesData, TimeSeriesMetadata, ) @@ -36,7 +37,7 @@ def add_time_series( self, metadata: TimeSeriesMetadata, time_series: TimeSeriesData, - connection: Any = None, + context: Any = None, ) -> None: if isinstance(time_series, (SingleTimeSeries, NonSequentialTimeSeries)): if metadata.time_series_uuid not in self._arrays: @@ -61,15 +62,24 @@ def get_time_series( metadata: TimeSeriesMetadata, start_time: datetime | None = None, length: int | None = None, - connection: Any = None, + context: Any = None, ) -> TimeSeriesData: if isinstance(metadata, SingleTimeSeriesMetadata): return self._get_single_time_series(metadata, start_time, length) elif isinstance(metadata, NonSequentialTimeSeriesMetadata): return self._get_nonsequential_time_series(metadata) + elif isinstance(metadata, DeterministicMetadata): + ts_data = self._arrays.get(metadata.time_series_uuid) + if ts_data is None: + msg = f"No time series with {metadata.time_series_uuid} is stored" + raise ISNotStored(msg) + + # Deterministic time series with 1D data is not supported + msg = "Single-dimensional data is not supported for Deterministic time series" + raise ValueError(msg) raise NotImplementedError(str(metadata.get_time_series_data_type())) - def remove_time_series(self, metadata: TimeSeriesMetadata, connection: Any = None) -> None: + def remove_time_series(self, metadata: TimeSeriesMetadata, context: Any = None) -> None: time_series = self._arrays.pop(metadata.time_series_uuid, None) if time_series is None: msg = f"No time series with {metadata.time_series_uuid} is stored" @@ -81,6 +91,21 @@ def serialize( msg = "Bug: InMemoryTimeSeriesStorage.serialize should never be called." raise Exception(msg) + @classmethod + def deserialize( + cls, + data: dict[str, Any], + time_series_dir: Path, + dst_time_series_directory: Path | None, + read_only: bool, + **kwargs: Any, + ) -> tuple["InMemoryTimeSeriesStorage", None]: + """Deserialize in-memory storage - should not be called during normal deserialization.""" + msg = "De-serialization does not support in-memory time series storage." 
+        from infrasys.exceptions import ISOperationNotAllowed
+
+        raise ISOperationNotAllowed(msg)
+
     def _get_single_time_series(
         self,
         metadata: SingleTimeSeriesMetadata,
@@ -97,16 +122,14 @@ def _get_single_time_series(
         index, length = metadata.get_range(start_time=start_time, length=length)
         ts_data = ts_data[index : index + length]
-        if metadata.quantity_metadata is not None:
-            ts_data = metadata.quantity_metadata.quantity_type(
-                ts_data, metadata.quantity_metadata.units
-            )
+        if metadata.units is not None:
+            ts_data = metadata.units.quantity_type(ts_data, metadata.units.units)
         assert ts_data is not None
         return SingleTimeSeries(
             uuid=metadata.time_series_uuid,
-            variable_name=metadata.variable_name,
+            name=metadata.name,
             resolution=metadata.resolution,
-            initial_time=start_time or metadata.initial_time,
+            initial_timestamp=start_time or metadata.initial_timestamp,
             data=ts_data,
             normalization=metadata.normalization,
         )
@@ -124,15 +147,13 @@ def _get_nonsequential_time_series(
             msg = f"No time series timestamps with {metadata.time_series_uuid} is stored"
             raise ISNotStored(msg)
 
-        if metadata.quantity_metadata is not None:
-            ts_data = metadata.quantity_metadata.quantity_type(
-                ts_data, metadata.quantity_metadata.units
-            )
+        if metadata.units is not None:
+            ts_data = metadata.units.quantity_type(ts_data, metadata.units.units)
         assert ts_data is not None
         assert ts_timestamps is not None
         return NonSequentialTimeSeries(
             uuid=metadata.time_series_uuid,
-            variable_name=metadata.variable_name,
+            name=metadata.name,
             data=ts_data,
             timestamps=ts_timestamps,
             normalization=metadata.normalization,
diff --git a/src/infrasys/migrations/__init__.py b/src/infrasys/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/infrasys/migrations/db_migrations.py b/src/infrasys/migrations/db_migrations.py
new file mode 100644
index 0000000..040f64b
--- /dev/null
+++ b/src/infrasys/migrations/db_migrations.py
@@ -0,0 +1,209 @@
+import json
+import sqlite3
+import uuid
+import warnings
+
+from loguru import logger
+
+from infrasys import (
+    TIME_SERIES_ASSOCIATIONS_TABLE,
+    TIME_SERIES_METADATA_TABLE,
+)
+from infrasys.time_series_metadata_store import make_features_string
+from infrasys.utils.metadata_utils import (
+    create_associations_table,
+    create_key_value_store,
+)
+from infrasys.utils.sqlite import execute
+from infrasys.utils.time_utils import str_timedelta_to_iso_8601
+
+_LEGACY_METADATA_TABLE = "legacy_metadata_backup"
+
+
+def metadata_store_needs_migration(conn: sqlite3.Connection, version: str | None = None) -> bool:
+    """Check if the database schema requires migration to the new format.
+
+    Parameters
+    ----------
+    conn : sqlite3.Connection
+        An active SQLite database connection.
+    version : str, optional
+        Reserved for future use; currently unused.
+
+    Returns
+    -------
+    bool
+        True if migration is required (new table does not exist), False otherwise.
+    """
+    cursor = conn.cursor()
+    query = "SELECT 1 FROM sqlite_master WHERE type='table' AND name=? LIMIT 1"
+    cursor.execute(query, (TIME_SERIES_ASSOCIATIONS_TABLE,))
+    # Migration is needed when the new associations table does not exist yet.
+    return cursor.fetchone() is None
+
+
+def migrate_legacy_metadata_store(conn: sqlite3.Connection) -> bool:
+    """Migrate the database from the legacy schema to the new separated schema.
+
+    Handles the transition from an older schema (where time series metadata and
+    associations were combined) to a newer schema featuring separate
+    `TIME_SERIES_ASSOCIATIONS_TABLE` and `KEY_VALUE_STORE_TABLE`.
+
+    Parameters
+    ----------
+    conn : sqlite3.Connection
+        An active SQLite database connection where the migration will be performed.
+
+    Returns
+    -------
+    bool
+        True if the migration was performed successfully.
+
+    Notes
+    -----
+    The migration process involves these steps:
+    1. Verify the existing `TIME_SERIES_METADATA_TABLE` matches the expected
+       legacy column structure.
+    2. Rename the legacy table to a temporary backup name.
+    3. Create the new `KEY_VALUE_STORE_TABLE` and `TIME_SERIES_ASSOCIATIONS_TABLE`.
+    4. Read data row-by-row from the backup table.
+    5. Transform legacy data:
+       - Extract `user_attributes` from the `metadata` JSON, renaming it to `features`.
+       - Convert the string timedelta `resolution` to ISO 8601 duration format.
+       - Set the default `owner_category` to "Component".
+       - Set a default empty JSON object for `serialization_info`.
+    6. Insert the transformed data into the new `TIME_SERIES_ASSOCIATIONS_TABLE`.
+    7. Create the required indexes on the new associations table.
+    8. Drop the temporary backup table.
+    9. Commit the transaction.
+    """
+    logger.info("Migrating legacy metadata schema.")
+
+    legacy_columns = [
+        "id",
+        "time_series_uuid",
+        "time_series_type",
+        "initial_time",
+        "resolution",
+        "variable_name",
+        "component_uuid",
+        "component_type",
+        "user_attributes_hash",
+        "metadata",
+    ]
+    cursor = conn.cursor()
+
+    cursor.execute(f"SELECT * FROM {TIME_SERIES_METADATA_TABLE} LIMIT 1")
+    columns = [desc[0] for desc in cursor.description]
+    if not all(column in columns for column in legacy_columns):
+        logger.error(f"Legacy schema does not match expected columns: {columns}")
+        msg = "Bug: Legacy schema doesn't match expected structure"
+        raise NotImplementedError(msg)
+
+    logger.debug("Creating backup tables.")
+    execute(
+        cursor,
+        f"ALTER TABLE {TIME_SERIES_METADATA_TABLE} RENAME TO {_LEGACY_METADATA_TABLE}",
+    )
+
+    logger.info("Creating new schema tables.")
+    create_key_value_store(connection=conn)
+    create_associations_table(connection=conn)
+
+    logger.info("Migrating data from legacy schema.")
+    cursor.execute(f"SELECT * FROM {_LEGACY_METADATA_TABLE}")
+    rows = cursor.fetchall()
+
+    sql_data_to_insert = []
+    normalization_in_metadata = []
+    for row in rows:
+        (
+            id_val,
+            time_series_uuid,
+            time_series_type,
+            initial_timestamp,
+            resolution,
+            name,
+            owner_uuid,
+            owner_type,
+            features_hash,
+            metadata_json,
+        ) = row
+
+        metadata = json.loads(metadata_json)
+
+        # Flatten the legacy metadata blob into the new schema's columns.
+        unit_metadata = metadata.pop("quantity_metadata")
+
+        # Keep track of whether any metadata had normalization.
+        if "normalization" in metadata and metadata["normalization"]:
+            normalization_in_metadata.append(True)
+
+        features_dict = {}
+        if metadata.get("user_attributes"):  # user_attributes was renamed to features
+            features_dict = metadata.pop("user_attributes")
+
+        owner_category = "Component"  # The legacy system did not have any other category.
+        length = metadata.get("length", 0)
+
+        # The legacy resolution was stored in timedelta string format.
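+        # For example, a legacy "1:00:00" (str(timedelta(hours=1))) becomes the
+        # ISO 8601 duration "PT1H"; the example values here are illustrative.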
+        resolution = str_timedelta_to_iso_8601(resolution)
+
+        # Fix the timestamp separator: 2020-01-01 00:00 -> 2020-01-01T00:00
+        initial_timestamp = initial_timestamp.replace(" ", "T")
+        sql_data_to_insert.append(
+            {
+                "time_series_uuid": time_series_uuid,
+                "time_series_type": time_series_type,
+                "initial_timestamp": initial_timestamp,
+                "resolution": resolution,
+                "length": length,
+                "name": name,
+                "owner_uuid": owner_uuid,
+                "owner_type": owner_type,
+                "owner_category": owner_category,
+                "features_json": make_features_string(features_dict),
+                "units": json.dumps(unit_metadata),
+                "metadata_uuid": str(uuid.uuid4()),  # metadata_uuid did not exist in the legacy schema
+            }
+        )
+
+    # Raise a warning for users that had normalization.
+    if any(normalization_in_metadata):
+        msg = "Normalization of `TimeSeries` has been deprecated in infrasys. "
+        msg += "The upgrader will drop these fields."
+        warnings.warn(msg)
+
+    # Exit early if there is no data to ingest.
+    if not sql_data_to_insert:
+        execute(cursor, f"DROP TABLE {_LEGACY_METADATA_TABLE}")
+        conn.commit()
+        logger.info("Schema migration completed.")
+        return True
+
+    # If we do have data, insert it.
+    logger.info(
+        f"Inserting {len(sql_data_to_insert)} records into {TIME_SERIES_ASSOCIATIONS_TABLE}."
+    )
+    cursor.executemany(
+        f"""
+        INSERT INTO `{TIME_SERIES_ASSOCIATIONS_TABLE}` (
+            time_series_uuid, time_series_type, initial_timestamp, resolution,
+            length, name, owner_uuid, owner_type, owner_category, features, units,
+            metadata_uuid
+        ) VALUES (
+            :time_series_uuid, :time_series_type, :initial_timestamp, :resolution,
+            :length, :name, :owner_uuid, :owner_type, :owner_category,
+            :features_json, :units, :metadata_uuid
+        )
+        """,
+        sql_data_to_insert,
+    )
+
+    # Drop the legacy table since it is no longer required.
+    execute(cursor, f"DROP TABLE {_LEGACY_METADATA_TABLE}")
+    conn.commit()
+    logger.info("Schema migration completed.")
+    return True
diff --git a/src/infrasys/migrations/metadata_migration.py b/src/infrasys/migrations/metadata_migration.py
new file mode 100644
index 0000000..3157765
--- /dev/null
+++ b/src/infrasys/migrations/metadata_migration.py
@@ -0,0 +1,34 @@
+from infrasys.serialization import TYPE_METADATA
+
+
+def component_needs_metadata_migration(component) -> bool:
+    """Check if we need to migrate to the new metadata format."""
+    metadata = component.get(TYPE_METADATA)
+    return isinstance(metadata, dict) and "fields" in metadata
+
+
+def migrate_component_metadata(component_list: list) -> list:
+    """Migrate legacy metadata for components.
+
+    Checks each component dict for a nested '__metadata__["fields"]' structure
+    and flattens it by replacing the '__metadata__' value with the 'fields' value.
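+
+    For example (illustrative shape), ``{"__metadata__": {"fields": {...}}}``
+    becomes ``{"__metadata__": {...}}``.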
+ """ + if not component_list: + return [] + for component in component_list: + metadata = component[TYPE_METADATA] + if isinstance(metadata, dict) and "fields" in metadata: + component[TYPE_METADATA] = metadata["fields"] + + for key, value in component.items(): + if isinstance(value, dict): + nested_metadata = value.get(TYPE_METADATA) + if isinstance(nested_metadata, dict) and "fields" in nested_metadata: + value[TYPE_METADATA] = nested_metadata["fields"] + elif isinstance(value, list): + if isinstance(value[0], dict): + nested_metadata = value[0].get(TYPE_METADATA) + if isinstance(nested_metadata, dict) and "fields" in nested_metadata: + component[key] = migrate_component_metadata(value) + + return component_list diff --git a/src/infrasys/models.py b/src/infrasys/models.py index 8b4fbe4..d46d2e4 100644 --- a/src/infrasys/models.py +++ b/src/infrasys/models.py @@ -40,7 +40,7 @@ def _serialize_uuid(self, _) -> str: def assign_new_uuid(self): """Generate a new UUID.""" self.uuid = uuid4() - logger.debug("Assigned new UUID for %s: %s", self.label, self.uuid) + logger.debug("Assigned new UUID for {}: {}", self.label, self.uuid) @classmethod def example(cls) -> "InfraSysBaseModelWithIdentifers": diff --git a/src/infrasys/serialization.py b/src/infrasys/serialization.py index 451add3..b379f68 100644 --- a/src/infrasys/serialization.py +++ b/src/infrasys/serialization.py @@ -1,13 +1,15 @@ import enum import importlib -from typing import Annotated, Any, Literal, Type, Union +from typing import Annotated, Any, Literal, Type, TypeAlias, Union from uuid import UUID -from pydantic import Field, field_serializer +from pydantic import Field, TypeAdapter, field_serializer from infrasys.models import InfraSysBaseModel +from infrasys.time_series_models import TimeSeriesData TYPE_METADATA = "__metadata__" +SERIALIZED_FIELDS = {"quantity_metadata", "normalization"} class SerializedType(str, enum.Enum): @@ -23,6 +25,7 @@ class SerializedTypeBase(InfraSysBaseModel): module: str type: str + model_config = {"extra": "ignore"} class SerializedBaseType(SerializedTypeBase): @@ -46,10 +49,16 @@ class SerializedQuantityType(SerializedTypeBase): serialized_type: Literal[SerializedType.QUANTITY] = SerializedType.QUANTITY -class SerializedTypeMetadata(InfraSysBaseModel): - """Serializes information about a type so that it can be de-serialized.""" - - fields: Annotated[ +MetadataType: TypeAlias = Annotated[ + Union[ + SerializedBaseType, + SerializedComponentReference, + SerializedQuantityType, + ], + Field(discriminator="serialized_type"), +] +SerializedTypeMetadata: TypeAdapter[MetadataType] = TypeAdapter( + Annotated[ Union[ SerializedBaseType, SerializedComponentReference, @@ -57,6 +66,7 @@ class SerializedTypeMetadata(InfraSysBaseModel): ], Field(discriminator="serialized_type"), ] +) class CachedTypeHelper: @@ -92,8 +102,8 @@ def serialize_value(obj: InfraSysBaseModel, *args, **kwargs) -> dict[str, Any]: """Serialize an infrasys object to a dictionary.""" cls = type(obj) data = obj.model_dump(*args, mode="json", round_trip=True, **kwargs) - data[TYPE_METADATA] = SerializedTypeMetadata( - fields=SerializedBaseType( + data[TYPE_METADATA] = SerializedTypeMetadata.validate_python( + SerializedBaseType( module=cls.__module__, type=cls.__name__, ), @@ -101,7 +111,7 @@ def serialize_value(obj: InfraSysBaseModel, *args, **kwargs) -> dict[str, Any]: return data -def deserialize_type(metadata: SerializedTypeBase) -> Type: +def deserialize_type(metadata: SerializedTypeBase) -> Type["TimeSeriesData"]: """Dynamically import 
     return _deserialize_type(metadata.module, metadata.type)
 
@@ -114,4 +124,7 @@ def _deserialize_type(module, obj_type) -> Type:
 def deserialize_value(data: dict[str, Any], metadata: SerializedTypeBase) -> Any:
     """Deserialize the value from a dictionary."""
     ctype = deserialize_type(metadata)
-    return ctype(**data)
+    # Ignore any fields that are not part of the model.
+    return ctype.model_validate(
+        {key: value for key, value in data.items() if key in ctype.model_fields}
+    )
diff --git a/src/infrasys/supplemental_attribute.py b/src/infrasys/supplemental_attribute.py
index 7ce7238..3c37e3f 100644
--- a/src/infrasys/supplemental_attribute.py
+++ b/src/infrasys/supplemental_attribute.py
@@ -24,7 +24,7 @@ def model_dump_custom(self, *args, **kwargs) -> dict[str, Any]:
         """Custom serialization for this package"""
         refs = {}
-        for x in self.model_fields:
+        for x in type(self).model_fields:
             val = self._model_dump_field(x)
             if val is not None:
                 refs[x] = val
@@ -36,8 +36,8 @@ def _model_dump_field(self, field) -> Any:
         val = getattr(self, field)
         if isinstance(val, BaseQuantity):
             data = val.to_dict()
-            data[TYPE_METADATA] = SerializedTypeMetadata(
-                fields=SerializedQuantityType(
+            data[TYPE_METADATA] = SerializedTypeMetadata.validate_python(
+                SerializedQuantityType(
                     module=val.__module__,
                     type=val.__class__.__name__,
                 ),
diff --git a/src/infrasys/supplemental_attribute_associations.py b/src/infrasys/supplemental_attribute_associations.py
index 1b132ff..ad14c55 100644
--- a/src/infrasys/supplemental_attribute_associations.py
+++ b/src/infrasys/supplemental_attribute_associations.py
@@ -7,51 +7,24 @@
 
 from loguru import logger
 
-from infrasys import Component
+from infrasys import Component, SUPPLEMENTAL_ATTRIBUTE_ASSOCIATIONS_TABLE
+from infrasys.exceptions import ISAlreadyAttached
 from infrasys.supplemental_attribute import SupplementalAttribute
 from infrasys.utils.sqlite import execute
-from infrasys.exceptions import ISAlreadyAttached
+from infrasys.utils.metadata_utils import (
+    create_supplemental_attribute_associations_table,
+)
 
-TABLE_NAME = "supplemental_attribute_associations"
+TABLE_NAME = SUPPLEMENTAL_ATTRIBUTE_ASSOCIATIONS_TABLE
 
 
 class SupplementalAttributeAssociationsStore:
     """Stores supplemental attribute associations in a SQLite database."""
 
-    TABLE_NAME = TABLE_NAME
-
     def __init__(self, con: sqlite3.Connection, initialize: bool = True):
         self._con = con
         if initialize:
-            self._create_association_table()
-            self._create_indexes()
-
-    def _create_association_table(self):
-        schema = [
-            "id INTEGER PRIMARY KEY",
-            "attribute_uuid TEXT",
-            "attribute_type TEXT",
-            "component_uuid TEXT",
-            "component_type TEXT",
-        ]
-        schema_text = ",".join(schema)
-        cur = self._con.cursor()
-        execute(cur, f"CREATE TABLE {self.TABLE_NAME}({schema_text})")
-        self._con.commit()
-        logger.debug("Created in-memory time series metadata table")
-
-    def _create_indexes(self) -> None:
-        cur = self._con.cursor()
-        execute(
-            cur,
-            f"CREATE INDEX IF NOT EXISTS by_attribute ON {self.TABLE_NAME} "
-            f"(attribute_uuid, component_uuid, component_type)",
-        )
-        execute(
-            cur,
-            f"CREATE INDEX IF NOT EXISTS by_component ON {self.TABLE_NAME} "
-            f"(component_uuid, attribute_uuid, attribute_type)",
-        )
+            create_supplemental_attribute_associations_table(self._con, table_name=TABLE_NAME)
 
     _ADD_ASSOCIATION_QUERY = f"""
     SELECT id FROM {TABLE_NAME}
@@ -83,7 +56,7 @@ def add(self, component: Component, attribute: SupplementalAttribute) -> None:
         )
 
         placeholder = ",".join(itertools.repeat("?", len(row)))
-        query = f"INSERT INTO {self.TABLE_NAME} VALUES ({placeholder})"
+        query = f"INSERT INTO {TABLE_NAME} VALUES ({placeholder})"
         execute(cur, query, params=row)
         self._con.commit()
 
@@ -215,13 +188,13 @@ def remove_association(
         # logger.debug("Deleted %s supplemental attribute associations", num_deleted)
 
     def _remove_associations(self, where_clause: str, params: Sequence[Any]) -> int:
-        query = f"DELETE FROM {self.TABLE_NAME} {where_clause}"
+        query = f"DELETE FROM {TABLE_NAME} {where_clause}"
         cur = self._con.cursor()
         execute(cur, query, params)
         rows = execute(cur, "SELECT CHANGES() AS changes").fetchall()
         assert len(rows) == 1, rows
         row = rows[0]
-        logger.debug("Deleted %s rows from the time series metadata table", row[0])
+        logger.debug("Deleted {} rows from the supplemental attribute associations table", row[0])
         self._con.commit()
         return row[0]
diff --git a/src/infrasys/system.py b/src/infrasys/system.py
index e6364f6..e035987 100644
--- a/src/infrasys/system.py
+++ b/src/infrasys/system.py
@@ -1,52 +1,66 @@
 """Defines a System"""
 
-from contextlib import contextmanager
-import json
 import shutil
 import sqlite3
-from operator import itemgetter
+import tempfile
+import zipfile
 from collections import defaultdict
+from contextlib import contextmanager
 from datetime import datetime
+from operator import itemgetter
 from pathlib import Path
-from typing import Any, Callable, Generator, Iterable, Optional, Type, TypeVar
+from typing import Any, Callable, Generator, Iterable, Literal, Optional, Type, TypeAlias, TypeVar
 from uuid import UUID, uuid4
 
+import orjson
 from loguru import logger
 from rich import print as _pprint
 from rich.table import Table
 
-from infrasys.exceptions import (
-    ISFileExists,
+from .component import (
+    Component,
+)
+from .component_manager import ComponentManager
+from .exceptions import (
     ISConflictingArguments,
+    ISFileExists,
+    ISInvalidParameter,
     ISOperationNotAllowed,
 )
-from infrasys.models import make_label
-from infrasys.component import (
-    Component,
-)
-from infrasys.component_manager import ComponentManager
-from infrasys.serialization import (
+from .migrations.db_migrations import (
+    metadata_store_needs_migration,
+    migrate_legacy_metadata_store,
+)
+from .migrations.metadata_migration import (
+    component_needs_metadata_migration,
+    migrate_component_metadata,
+)
+from .models import make_label
+from .serialization import (
+    TYPE_METADATA,
     CachedTypeHelper,
-    SerializedTypeMetadata,
     SerializedBaseType,
     SerializedComponentReference,
     SerializedQuantityType,
     SerializedType,
-    TYPE_METADATA,
+    SerializedTypeMetadata,
 )
-from infrasys.supplemental_attribute import SupplementalAttribute
-from infrasys.time_series_manager import TimeSeriesManager, TIME_SERIES_KWARGS
-from infrasys.time_series_models import (
-    DatabaseConnection,
+from .supplemental_attribute import SupplementalAttribute
+from .supplemental_attribute_manager import SupplementalAttributeManager
+from .time_series_manager import TIME_SERIES_KWARGS, TimeSeriesManager
+from .time_series_models import (
+    SingleTimeSeries,
     TimeSeriesData,
     TimeSeriesKey,
     TimeSeriesMetadata,
+    TimeSeriesStorageContext,
 )
-from infrasys.supplemental_attribute_manager import SupplementalAttributeManager
-from infrasys.utils.sqlite import backup, create_in_memory_db, restore
+from .utils.sqlite import backup, create_in_memory_db, restore
+from .utils.time_utils import from_iso_8601
 
 T = TypeVar("T", bound="Component")
 U = TypeVar("U", bound="SupplementalAttribute")
+FileMode: TypeAlias = Literal["r", "r+", "a"]
 
 
 class System:
@@ -109,12 +123,46 @@ def __init__(
         self._supplemental_attr_mgr = (
             supplemental_attribute_manager or SupplementalAttributeManager(self._con)
         )
+        self._closed = False
         self._data_format_version: Optional[str] = None
         # Note to devs: if you add new fields, add support in to_json/from_json as appropriate.
 
         # TODO: add pretty printing of components and time series
 
+    def close(self) -> None:
+        """Close open resources such as SQLite connections."""
+        if self._closed:
+            return
+        self._closed = True
+        try:
+            self._component_mgr.close()
+        except Exception:
+            logger.debug("Error closing component manager", exc_info=True)
+
+        try:
+            self._time_series_mgr.close()
+        except Exception:
+            logger.debug("Error closing time series manager", exc_info=True)
+
+        if self._con is not None:
+            try:
+                self._con.close()
+            except Exception:
+                logger.debug("Error closing system SQLite connection", exc_info=True)
+
+    def __enter__(self) -> "System":
+        return self
+
+    def __exit__(self, exc_type, exc, tb) -> None:
+        self.close()
+
+    def __del__(self) -> None:
+        try:
+            self.close()
+        except Exception:
+            logger.debug("Error closing system in destructor", exc_info=True)
+
     @property
     def auto_add_composed_components(self) -> bool:
         """Return the setting for auto_add_composed_components."""
@@ -190,11 +238,14 @@ def to_json(self, filename: Path | str, overwrite=False, indent=None, data=None)
         data["system"] = system_data
 
         backup(self._con, time_series_dir / self.DB_FILENAME)
-        self._time_series_mgr.serialize(system_data["time_series"], time_series_dir)
+        self._time_series_mgr.serialize(
+            system_data["time_series"], time_series_dir, db_name=self.DB_FILENAME
+        )
 
-        with open(filename, "w", encoding="utf-8") as f_out:
-            json.dump(data, f_out, indent=indent)
-        logger.info("Wrote system data to {}", filename)
+        # orjson only supports 2-space indentation; treat any truthy indent as a request for it.
+        data_dump = orjson.dumps(data, option=orjson.OPT_INDENT_2 if indent else 0)
+        with open(filename, "wb") as f_out:
+            f_out.write(data_dump)
+        logger.info("Wrote system data to {}", filename)
 
     @classmethod
     def from_json(
@@ -215,13 +266,117 @@ def from_json(
         --------
         >>> system = System.from_json("systems/system1.json")
         """
-        with open(filename, encoding="utf-8") as f_in:
-            data = json.load(f_in)
+        with open(filename, "rb") as f_in:
+            data = orjson.loads(f_in.read())
         time_series_parent_dir = Path(filename).parent
         return cls.from_dict(
             data, time_series_parent_dir, upgrade_handler=upgrade_handler, **kwargs
         )
 
+    @classmethod
+    def load(
+        cls,
+        zip_path: Path | str,
+        time_series_directory: Path | str | None = None,
+        upgrade_handler: Callable | None = None,
+        **kwargs: Any,
+    ) -> "System":
+        """Load a System from a zip archive created by the save() method.
+
+        The zip file will be extracted to a temporary directory, the system will be
+        deserialized, and the temporary files will be cleaned up automatically.
+        Time series storage files are copied to a permanent location during deserialization.
+
+        Parameters
+        ----------
+        zip_path : Path | str
+            Path to the zip file containing the system.
+        time_series_directory : Path | str | None
+            Optional path to the final time series location.
+        upgrade_handler : Callable | None
+            Optional function to handle data format upgrades. Should only be set when the parent
+            package composes this package. If set, it will be called before de-serialization of
+            the components.
+        **kwargs : Any
+            Additional arguments passed to the System constructor. Refer to the System constructor
+            for available options. Use `time_series_directory` to specify where time series
+            files should be stored.
+
+        Returns
+        -------
+        System
+            The deserialized system.
+
+        Raises
+        ------
+        FileNotFoundError
+            Raised if the zip file does not exist.
+        ISInvalidParameter
+            Raised if the file is not a valid zip archive, contains no system JSON file,
+            or does not contain a valid system.
+        ISOperationNotAllowed
+            Raised if the zip archive contains more than one JSON file.
+
+        Examples
+        --------
+        >>> system = System.load("my_system.zip")
+        >>> system2 = System.load(Path("archived_systems/system1.zip"))
+        >>> # Specify where time series files should be stored
+        >>> system3 = System.load("my_system.zip", time_series_directory="/path/to/storage")
+
+        See Also
+        --------
+        save : Save a system to a directory or zip file
+        from_json : Load a system from a JSON file
+        """
+        if isinstance(zip_path, str):
+            zip_path = Path(zip_path)
+
+        if not zip_path.exists():
+            msg = f"Zip file does not exist: {zip_path}"
+            raise FileNotFoundError(msg)
+
+        if not zipfile.is_zipfile(zip_path):
+            msg = f"File is not a valid zip archive: {zip_path}"
+            raise ISInvalidParameter(msg)
+
+        # Create a temporary directory for extraction
+        with tempfile.TemporaryDirectory(dir=time_series_directory) as temp_dir:
+            temp_path = Path(temp_dir)
+
+            try:
+                with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                    zip_ref.extractall(temp_path)
+                    logger.debug("Extracted {} to temporary directory {}", zip_path, temp_path)
+            except (zipfile.BadZipFile, OSError) as e:
+                msg = f"Failed to extract zip file {zip_path}: {e}"
+                raise ISInvalidParameter(msg) from e
+
+            # Find the system JSON file; archives may use different file names.
+            json_files = list(temp_path.rglob("*.json"))
+
+            if not json_files:
+                msg = f"No JSON file found in zip archive: {zip_path}"
+                raise ISInvalidParameter(msg)
+
+            if len(json_files) > 1:
+                msg = (
+                    f"Multiple JSON files found in zip archive: {zip_path}. "
+                    f"Expected exactly one system JSON file."
+                )
+                raise ISOperationNotAllowed(msg)
+
+            json_file = json_files[0]
+            logger.debug("Found system JSON file: {}", json_file)
+
+            kwargs["time_series_directory"] = time_series_directory
+            try:
+                system = cls.from_json(json_file, upgrade_handler=upgrade_handler, **kwargs)
+                logger.info("Loaded system from {}", zip_path)
+            except (OSError, KeyError, ValueError, TypeError) as e:
+                msg = f"Failed to deserialize system from {json_file}: {e}"
+                raise ISInvalidParameter(msg) from e
+        return system
+
     def to_records(
         self,
         component_type: Type[Component],
@@ -301,6 +456,10 @@ def from_dict(
         )
         con = create_in_memory_db()
         restore(con, ts_path / data["time_series"]["directory"] / System.DB_FILENAME)
+
+        if metadata_store_needs_migration(con):
+            migrate_legacy_metadata_store(con)
+
         time_series_manager = TimeSeriesManager.deserialize(
             con, data["time_series"], ts_path, **ts_kwargs
         )
@@ -329,6 +488,9 @@ def from_dict(
             system.data_format_version,
         )
         system.deserialize_system_attributes(system_data)
+
+        components = system_data["components"]
+        if components and component_needs_metadata_migration(components[0]):
+            system_data["components"] = migrate_component_metadata(components)
         system._deserialize_components(system_data["components"])
         system._deserialize_supplemental_attributes(system_data["supplemental_attributes"])
         logger.info("Deserialized system {}", system.label)
@@ -847,8 +1009,8 @@ def remove_component(
             self.remove_time_series(
                 component,
                 time_series_type=metadata.get_time_series_data_type(),
-                variable_name=metadata.variable_name,
-                **metadata.user_attributes,
+                name=metadata.name,
+                **metadata.features,
             )
 
         self._component_mgr.remove(component, cascade_down=cascade_down, force=force)
@@ -918,8 +1080,8 @@ def remove_supplemental_attribute(self, attribute: SupplementalAttribute) -> Non
             self.remove_time_series(
                 attribute,
                 time_series_type=metadata.get_time_series_data_type(),
-                variable_name=metadata.variable_name,
-                **metadata.user_attributes,
+                name=metadata.name,
+                **metadata.features,
             )
         return self._supplemental_attr_mgr.remove(attribute)
@@ -961,8 +1123,8 @@ def add_time_series(
         self,
         time_series: TimeSeriesData,
         *owners: Component | SupplementalAttribute,
-        connection: DatabaseConnection | None = None,
-        **user_attributes: Any,
+        context: TimeSeriesStorageContext | None = None,
+        **features: Any,
     ) -> TimeSeriesKey:
         """Store a time series array for one or more components or supplemental attributes.
 
@@ -972,7 +1134,7 @@ def add_time_series(
             Time series data to store.
         owners : Component | SupplementalAttribute
             Add the time series to all of these components or supplemental attributes.
-        user_attributes : Any
+        features : Any
             Key/value pairs to store with the time series data. Must be JSON-serializable.
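+            For example, ``scenario="high_load"`` or ``model_year=2030``; these
+            key names are illustrative.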
 
         Returns
@@ -994,7 +1156,7 @@ def add_time_series(
         >>> gen2 = system.get_component(Generator, "gen2")
         >>> ts = SingleTimeSeries.from_array(
             data=[0.86, 0.78, 0.81, 0.85, 0.79],
-            variable_name="active_power",
+            name="active_power",
             start_time=datetime(year=2030, month=1, day=1),
             resolution=timedelta(hours=1),
         )
@@ -1003,8 +1165,8 @@ def add_time_series(
         return self._time_series_mgr.add(
             time_series,
             *owners,
-            connection=connection,
-            **user_attributes,
+            context=context,
+            **features,
         )
 
     def copy_time_series(
@@ -1042,12 +1204,12 @@ def copy_time_series(
     def get_time_series(
         self,
         owner: Component | SupplementalAttribute,
-        variable_name: str | None = None,
+        name: str | None = None,
         time_series_type: Type[TimeSeriesData] | None = None,
         start_time: datetime | None = None,
         length: int | None = None,
-        connection: DatabaseConnection | None = None,
-        **user_attributes: str,
+        context: TimeSeriesStorageContext | None = None,
+        **features: str,
     ) -> Any:
         """Return a time series array.
 
@@ -1055,7 +1217,7 @@ def get_time_series(
         ----------
         component : Component
             Component to which the time series must be attached.
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name. Required if the other inputs will
             match more than one time series.
         time_series_type : Type[TimeSeriesData] | None
@@ -1065,9 +1227,9 @@ def get_time_series(
             If not None, take a slice of the time series starting at this time.
         length : int | None
             If not None, take a slice of the time series with this length.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
-        connection
+        context : TimeSeriesStorageContext | None
             Optional, connection returned by :meth:`open_time_series_store`
 
         Raises
@@ -1095,12 +1257,12 @@ def get_time_series(
         """
         return self._time_series_mgr.get(
             owner,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
             start_time=start_time,
             length=length,
-            connection=connection,
-            **user_attributes,
+            context=context,
+            **features,
         )
 
     def get_time_series_by_key(
@@ -1112,9 +1274,9 @@ def has_time_series(
         self,
         owner: Component | SupplementalAttribute,
-        variable_name: Optional[str] = None,
-        time_series_type: Type[TimeSeriesData] | None = None,
-        **user_attributes: str,
+        name: Optional[str] = None,
+        time_series_type: Type[TimeSeriesData] = SingleTimeSeries,
+        **features: str,
     ) -> bool:
         """Return True if the component has time series matching the inputs.
 
@@ -1122,28 +1284,28 @@ def has_time_series(
         ----------
         component : Component
             Component to check for matching time series.
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name.
         time_series_type : Type[TimeSeriesData]
             Optional, search for time series with this type.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
         """
         return self.time_series.has_time_series(
             owner,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
-            **user_attributes,
+            **features,
         )
 
     def list_time_series(
         self,
         component: Component,
-        variable_name: str | None = None,
-        time_series_type: Type[TimeSeriesData] | None = None,
+        name: str | None = None,
+        time_series_type: Type[TimeSeriesData] = SingleTimeSeries,
         start_time: datetime | None = None,
         length: int | None = None,
-        **user_attributes: Any,
+        **features: Any,
     ) -> list[TimeSeriesData]:
         """Return all time series that match the inputs.
 
@@ -1151,7 +1313,7 @@ def list_time_series(
         ----------
         component : Component
             Component to which the time series must be attached.
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name.
         time_series_type : Type[TimeSeriesData] | None
             Optional, search for time series with this type.
@@ -1159,7 +1321,7 @@ def list_time_series(
             If not None, take a slice of the time series starting at this time.
         length : int | None
             If not None, take a slice of the time series with this length.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
 
         Examples
@@ -1170,19 +1332,19 @@ def list_time_series(
         """
         return self._time_series_mgr.list_time_series(
             component,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
             start_time=start_time,
             length=length,
-            **user_attributes,
+            **features,
         )
 
     def list_time_series_keys(
         self,
         owner: Component | SupplementalAttribute,
-        variable_name: str | None = None,
-        time_series_type: Type[TimeSeriesData] | None = None,
-        **user_attributes: Any,
+        name: str | None = None,
+        time_series_type: Type[TimeSeriesData] = SingleTimeSeries,
+        **features: Any,
     ) -> list[TimeSeriesKey]:
         """Return all time series keys that match the inputs.
 
@@ -1190,11 +1352,11 @@ def list_time_series_keys(
         ----------
         owner : Component | SupplementalAttribute
             Component to which the time series must be attached.
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name.
         time_series_type : Type[TimeSeriesData] | None
             Optional, search for time series with this type.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
 
         Examples
@@ -1205,17 +1367,17 @@ def list_time_series_keys(
         """
         return self.time_series.list_time_series_keys(
             owner,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
-            **user_attributes,
+            **features,
         )
 
     def list_time_series_metadata(
         self,
         component: Component,
-        variable_name: str | None = None,
-        time_series_type: Type[TimeSeriesData] | None = None,
-        **user_attributes: Any,
+        name: str | None = None,
+        time_series_type: Type[TimeSeriesData] = SingleTimeSeries,
+        **features: Any,
     ) -> list[TimeSeriesMetadata]:
         """Return all time series metadata that match the inputs.
 
@@ -1223,11 +1385,11 @@ def list_time_series_metadata(
         ----------
         component : Component
             Component to which the time series must be attached.
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name.
         time_series_type : Type[TimeSeriesData] | None
             Optional, search for time series with this type.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
 
         Examples
@@ -1238,17 +1400,17 @@ def list_time_series_metadata(
         """
         return self.time_series.list_time_series_metadata(
             component,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
-            **user_attributes,
+            **features,
         )
 
     def remove_time_series(
         self,
         *owners: Component | SupplementalAttribute,
-        variable_name: str | None = None,
-        time_series_type: Type[TimeSeriesData] | None = None,
-        **user_attributes: Any,
+        name: str | None = None,
+        time_series_type: Type[TimeSeriesData] = SingleTimeSeries,
+        **features: Any,
     ) -> None:
         """Remove all time series arrays attached to the components or supplemental attributes
         matching the inputs.
 
@@ -1257,11 +1419,11 @@ def remove_time_series(
         ----------
         owners
             Affected components or supplemental attributes
-        variable_name : str | None
+        name : str | None
             Optional, search for time series with this name.
-        time_series_type : Type[TimeSeriesData] | None
-            Optional, only remove time series with this type.
+        time_series_type : Type[TimeSeriesData]
+            Optional, only remove time series with this type. Defaults to SingleTimeSeries.
-        user_attributes : str
+        features : str
             Optional, search for time series with these attributes.
 
         Raises
@@ -1278,13 +1440,15 @@ def remove_time_series(
         """
         return self._time_series_mgr.remove(
             *owners,
-            variable_name=variable_name,
+            name=name,
             time_series_type=time_series_type,
-            **user_attributes,
+            **features,
         )
 
     @contextmanager
-    def open_time_series_store(self) -> Generator[DatabaseConnection, None, None]:
+    def open_time_series_store(
+        self, mode: FileMode = "r+"
+    ) -> Generator[TimeSeriesStorageContext, None, None]:
         """Open a connection to the time series store. This can improve performance when
         reading or writing many time series arrays for specific backends (chronify and HDF5).
         It will also rollback any changes if an exception is raised.
@@ -1298,10 +1462,10 @@ def open_time_series_store(
         Examples
         --------
         >>> with system.open_time_series_store() as conn:
-        ...     system.add_time_series(ts1, gen1, connection=conn)
-        ...     system.add_time_series(ts2, gen1, connection=conn)
+        ...     system.add_time_series(ts1, gen1)
+        ...     system.add_time_series(ts2, gen1)
         """
-        with self._time_series_mgr.open_time_series_store() as conn:
+        with self._time_series_mgr.open_time_series_store(mode=mode) as conn:
             yield conn
 
     def serialize_system_attributes(self) -> dict[str, Any]:
@@ -1435,9 +1599,9 @@ def _deserialize_components_first_pass(
         for component_dict in components:
             component = self._try_deserialize_component(component_dict, cached_types)
             if component is None:
-                metadata = SerializedTypeMetadata(**component_dict[TYPE_METADATA])
-                assert isinstance(metadata.fields, SerializedBaseType)
-                component_type = cached_types.get_type(metadata.fields)
+                metadata = SerializedTypeMetadata.validate_python(component_dict[TYPE_METADATA])
+                assert isinstance(metadata, SerializedBaseType)
+                component_type = cached_types.get_type(metadata)
                 skipped_types[component_type].append(component_dict)
             else:
                 deserialized_types.add(type(component))
@@ -1479,8 +1643,8 @@ def _try_deserialize_component(
         if values is None:
             return None
 
-        metadata = SerializedTypeMetadata(**component[TYPE_METADATA])
-        component_type = cached_types.get_type(metadata.fields)
+        metadata = SerializedTypeMetadata.validate_python(component[TYPE_METADATA])
+        component_type = cached_types.get_type(metadata)
         actual_component = component_type(**values)
         self._components.add(actual_component, deserialization_in_progress=True)
         return actual_component
@@ -1491,16 +1655,14 @@ def _deserialize_fields(
         values = {}
         for field, value in component.items():
             if isinstance(value, dict) and TYPE_METADATA in value:
-                metadata = SerializedTypeMetadata(**value[TYPE_METADATA])
-                if isinstance(metadata.fields, SerializedComponentReference):
-                    composed_value = self._deserialize_composed_value(
-                        metadata.fields, cached_types
-                    )
+                metadata = SerializedTypeMetadata.validate_python(value[TYPE_METADATA])
+                if isinstance(metadata, SerializedComponentReference):
+                    composed_value = self._deserialize_composed_value(metadata, cached_types)
                     if composed_value is None:
                         return None
                     values[field] = composed_value
-                elif isinstance(metadata.fields, SerializedQuantityType):
-                    quantity_type = cached_types.get_type(metadata.fields)
+                elif isinstance(metadata, SerializedQuantityType):
+                    quantity_type = cached_types.get_type(metadata)
                     values[field] = quantity_type(value=value["value"], units=value["units"])
                 else:
                     msg = f"Bug: unhandled type: {field=} {value=}"
@@ -1510,11 +1672,11 @@ def _deserialize_fields(
                 and value
                 and isinstance(value[0], dict)
                 and TYPE_METADATA in value[0]
-                and value[0][TYPE_METADATA]["fields"]["serialized_type"]
+                and value[0][TYPE_METADATA]["serialized_type"]
                 == SerializedType.COMPOSED_COMPONENT.value
             ):
-                metadata = SerializedTypeMetadata(**value[0][TYPE_METADATA])
-                assert isinstance(metadata.fields, SerializedComponentReference)
+                metadata = SerializedTypeMetadata.validate_python(value[0][TYPE_METADATA])
+                assert isinstance(metadata, SerializedComponentReference)
                 composed_values = self._deserialize_composed_list(value, cached_types)
                 if composed_values is None:
                     return None
@@ -1537,11 +1699,11 @@ def _deserialize_composed_list(
     ) -> list[Any] | None:
         deserialized_components = []
         for component in components:
-            metadata = SerializedTypeMetadata(**component[TYPE_METADATA])
-            assert isinstance(metadata.fields, SerializedComponentReference)
-            component_type = cached_types.get_type(metadata.fields)
+            metadata = SerializedTypeMetadata.validate_python(component[TYPE_METADATA])
+            assert isinstance(metadata, SerializedComponentReference)
+            component_type = cached_types.get_type(metadata)
             if cached_types.allowed_to_deserialize(component_type):
-                deserialized_components.append(self._components.get_by_uuid(metadata.fields.uuid))
+                deserialized_components.append(self._components.get_by_uuid(metadata.uuid))
             else:
                 return None
         return deserialized_components
@@ -1552,8 +1714,8 @@ def _deserialize_supplemental_attributes(
         """Deserialize supplemental_attributes from dictionaries and add them to the system."""
         cached_types = CachedTypeHelper()
         for sa_dict in supplemental_attributes:
-            metadata = SerializedTypeMetadata(**sa_dict[TYPE_METADATA])
-            supplemental_attribute_type = cached_types.get_type(metadata.fields)
+            metadata = SerializedTypeMetadata.validate_python(sa_dict[TYPE_METADATA])
+            supplemental_attribute_type = cached_types.get_type(metadata)
             values = self._deserialize_fields(sa_dict, cached_types)
             attr = supplemental_attribute_type(**values)
             self._supplemental_attr_mgr.add(None, attr, deserialization_in_progress=True)
@@ -1563,13 +1725,72 @@ def _deserialize_supplemental_attributes(
     def _make_time_series_directory(filename: Path) -> Path:
         return filename.parent / (filename.stem + "_time_series")
 
-    def show_components(self, component_type):
-        # Filtered view of certain concrete types (not really concrete types)
-        # We can implement custom printing if we want
-        # Dan suggest to remove UUID, system.UUID from component.
-        # Nested components gets special handling.
-        # What we do with components w/o names? Use .label for nested components.
-        raise NotImplementedError
+    def show_components(
+        self,
+        component_type: Type[Component],
+        show_uuid: bool = False,
+        show_time_series: bool = False,
+        show_supplemental: bool = False,
+    ) -> None:
+        """Display a table of components of the specified type.
+
+        Parameters
+        ----------
+        component_type : Type[Component]
+            The type of components to display. If component_type is an abstract type,
+            all matching subtypes will be included.
+        show_uuid : bool
+            Whether to include the UUID column in the table. Defaults to False.
+        show_time_series : bool
+            Whether to include the Time Series count column in the table. Defaults to False.
+        show_supplemental : bool
+            Whether to include the Supplemental Attributes count column in the table.
+            Defaults to False.
+
+        Examples
+        --------
+        >>> system.show_components(Generator)  # Shows only names
+        >>> system.show_components(Bus, show_uuid=True)
+        >>> system.show_components(Generator, show_time_series=True)
+        >>> system.show_components(Generator, show_supplemental=True)
+        """
+        components = list(self.get_components(component_type))
+
+        if not components:
+            logger.warning(f"No components of type {component_type.__name__} found in the system.")
+            return
+
+        table = Table(
+            title=f"{component_type.__name__}: {len(components)}",
+            show_header=True,
+            title_justify="left",
+            title_style="bold",
+        )
+        table.add_column("Name", min_width=20, justify="left")
+
+        if show_uuid:
+            table.add_column("UUID", min_width=36, justify="left")
+        if show_time_series:
+            table.add_column("Time Series Count", min_width=12, justify="right")
+        if show_supplemental:
+            table.add_column("Supplemental Attribute Count", min_width=12, justify="right")
+
+        sorted_components = sorted(components, key=lambda x: getattr(x, "name", x.label))
+
+        for component in sorted_components:
+            row_data = [getattr(component, "name", component.label)]
+
+            if show_uuid:
+                row_data.append(str(component.uuid))
+            if show_time_series:
+                row_data.append(str(len(self.list_time_series_metadata(component))))
+            if show_supplemental:
+                row_data.append(
+                    str(len(self.get_supplemental_attributes_with_component(component)))
+                )
+
+            table.add_row(*row_data)
+
+        _pprint(table)
 
     def info(self):
         info = SystemInfo(system=self)
@@ -1603,6 +1824,7 @@ def render(self) -> None:
             component_type_count,
             time_series_type_count,
         ) = self.extract_system_counts()
+        owner_type_count = self._get_owner_type_counts(component_type_count)
 
         # System table
         system_table = Table(
@@ -1617,6 +1839,8 @@ def render(self) -> None:
         system_table.add_row("Data format version", self.system._data_format_version)
         system_table.add_row("Components attached", f"{component_count}")
        system_table.add_row("Time Series attached", f"{time_series_count}")
+        total_suppl_attrs = self.system.get_num_supplemental_attributes()
+        system_table.add_row("Supplemental Attributes attached", f"{total_suppl_attrs}")
         system_table.add_row("Description", self.system.description)
         _pprint(system_table)
 
@@ -1645,7 +1869,7 @@ def render(self) -> None:
             title_justify="left",
             title_style="bold",
         )
-        time_series_table.add_column("Component Type", min_width=20)
+        time_series_table.add_column("Owner Type", min_width=20)
         time_series_table.add_column("Time Series Type", justify="right")
         time_series_table.add_column("Initial time", justify="right")
         time_series_table.add_column("Resolution", justify="right")
@@ -1658,14 +1882,26 @@ def render(self) -> None:
             time_series_start_time,
             time_series_resolution,
         ), time_series_count in sorted(time_series_type_count.items(), key=itemgetter(slice(4))):
+            owner_count = owner_type_count.get(component_type, 0)
             time_series_table.add_row(
                 f"{component_type}",
                 f"{time_series_type}",
                 f"{time_series_start_time}",
-                f"{time_series_resolution}",
-                f"{component_type_count[component_type]}",
+                f"{from_iso_8601(time_series_resolution)}",
+                f"{owner_count}",
                 f"{time_series_count}",
             )
 
         if time_series_table.rows:
            _pprint(time_series_table)
+
+    def _get_owner_type_counts(self, component_type_count: dict[str, int]) -> dict[str, int]:
+        """Combine component and supplemental attribute counts by type for summary tables."""
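+        # Supplemental attributes are tallied by concrete type name so they can
+        # share the Owner Type column with components in the summary tables.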
+        owner_type_count = dict(component_type_count)
+        supplemental_attribute_counts: dict[str, int] = defaultdict(int)
+
+        for attribute in self.system._supplemental_attr_mgr.iter_all():
+            supplemental_attribute_counts[type(attribute).__name__] += 1
+
+        owner_type_count.update(supplemental_attribute_counts)
+        return owner_type_count
diff --git a/src/infrasys/time_series_manager.py b/src/infrasys/time_series_manager.py
index 5b0a0ab..ca1f6ac 100644
--- a/src/infrasys/time_series_manager.py
+++ b/src/infrasys/time_series_manager.py
@@ -1,44 +1,65 @@
 """Manages time series arrays"""
 
-from contextlib import contextmanager
+import atexit
 import sqlite3
+import tempfile
+from contextlib import contextmanager
 from datetime import datetime
 from functools import singledispatch
 from pathlib import Path
-from typing import Any, Generator, Optional, Type
+from tempfile import mkdtemp
+from typing import Any, Generator, Literal, Optional, Type
 
+import numpy as np
 from loguru import logger
 
+try:
+    import h5py
+except ImportError:  # h5py is an optional dependency
+    h5py = None
+
-from infrasys.arrow_storage import ArrowTimeSeriesStorage
-from infrasys import Component
-from infrasys.exceptions import ISInvalidParameter, ISOperationNotAllowed
-from infrasys.in_memory_time_series_storage import InMemoryTimeSeriesStorage
-from infrasys.supplemental_attribute import SupplementalAttribute
-from infrasys.time_series_metadata_store import TimeSeriesMetadataStore
-from infrasys.time_series_models import (
-    DatabaseConnection,
+from . import TIME_SERIES_ASSOCIATIONS_TABLE
+from .arrow_storage import ArrowTimeSeriesStorage
+from .component import Component
+from .exceptions import ISInvalidParameter, ISOperationNotAllowed
+from .h5_time_series_storage import HDF5TimeSeriesStorage
+from .in_memory_time_series_storage import InMemoryTimeSeriesStorage
+from .supplemental_attribute import SupplementalAttribute
+from .time_series_metadata_store import TimeSeriesMetadataStore
+from .time_series_models import (
+    DeterministicMetadata,
+    DeterministicTimeSeriesKey,
+    NonSequentialTimeSeries,
+    NonSequentialTimeSeriesKey,
+    NonSequentialTimeSeriesMetadata,
     SingleTimeSeries,
     SingleTimeSeriesKey,
     SingleTimeSeriesMetadata,
-    NonSequentialTimeSeries,
-    NonSequentialTimeSeriesMetadata,
-    NonSequentialTimeSeriesKey,
     TimeSeriesData,
     TimeSeriesKey,
     TimeSeriesMetadata,
+    TimeSeriesStorageContext,
     TimeSeriesStorageType,
 )
-from infrasys.time_series_storage_base import TimeSeriesStorageBase
+from .time_series_storage_base import TimeSeriesStorageBase
+from .utils.path_utils import clean_tmp_folder
+from .utils.sqlite import has_table
 
 try:
-    from infrasys.chronify_time_series_storage import ChronifyTimeSeriesStorage
+    from .chronify_time_series_storage import ChronifyTimeSeriesStorage
 
     is_chronify_installed = True
 except ImportError:
     is_chronify_installed = False
 
 
+def is_h5py_installed() -> bool:
+    """Return True if the optional h5py dependency is available."""
+    return h5py is not None
+
+
 TIME_SERIES_KWARGS = {
+    "in_memory": False,
     "time_series_read_only": False,
     "time_series_directory": None,
     "time_series_storage_type": TimeSeriesStorageType.ARROW,
@@ -46,6 +67,16 @@
 }
 
 
+TIME_SERIES_REGISTRY: dict[TimeSeriesStorageType, type[TimeSeriesStorageBase]] = {
+    TimeSeriesStorageType.ARROW: ArrowTimeSeriesStorage,
+    TimeSeriesStorageType.HDF5: HDF5TimeSeriesStorage,
+    TimeSeriesStorageType.MEMORY: InMemoryTimeSeriesStorage,
+}
+
+if is_chronify_installed:
+    TIME_SERIES_REGISTRY[TimeSeriesStorageType.CHRONIFY] = ChronifyTimeSeriesStorage
+
+
 def _process_time_series_kwarg(key: str, **kwargs: Any) -> Any:
     return kwargs.get(key, TIME_SERIES_KWARGS[key])
 
@@ -58,23 +89,43 @@ def __init__(
         con: sqlite3.Connection,
         storage: Optional[TimeSeriesStorageBase] = None,
         initialize: bool = True,
+        metadata_store: TimeSeriesMetadataStore | None = None,
         **kwargs,
     ) -> None:
         self._con = con
-        self._metadata_store = TimeSeriesMetadataStore(con, initialize=initialize)
+        self._metadata_store = metadata_store or TimeSeriesMetadataStore(
+            con, initialize=initialize
+        )
         self._read_only = _process_time_series_kwarg("time_series_read_only", **kwargs)
         self._storage = storage or self.create_new_storage(**kwargs)
+        self._context: TimeSeriesStorageContext | None = None
         # TODO: create parsing mechanism? CSV, CSV + JSON
 
+    def close(self) -> None:
+        """Release resources held by the storage backend."""
+        storage = getattr(self, "_storage", None)
+        for attr in ("close", "dispose"):
+            func = getattr(storage, attr, None)
+            if callable(func):
+                try:
+                    func()
+                except Exception:
+                    logger.debug("Error closing time series storage", exc_info=True)
+                break
+
     @staticmethod
-    def create_new_storage(permanent: bool = False, **kwargs):
+    def create_new_storage(permanent: bool = False, **kwargs):  # noqa: C901
         base_directory: Path | None = _process_time_series_kwarg("time_series_directory", **kwargs)
         storage_type = _process_time_series_kwarg("time_series_storage_type", **kwargs)
         if permanent:
             if base_directory is None:
                 msg = "Can't convert to permanent storage without a base directory"
                 raise ISInvalidParameter(msg)
+        if not base_directory:
+            # base_directory is falsy here, so mkdtemp falls back to the system temp location.
+            base_directory = Path(mkdtemp())
+            logger.debug("Creating tmp folder at {}", base_directory)
+            atexit.register(clean_tmp_folder, base_directory)
 
         match storage_type:
             case TimeSeriesStorageType.ARROW:
@@ -105,6 +156,12 @@ def create_new_storage(permanent: bool = False, **kwargs):
                 )
             case TimeSeriesStorageType.MEMORY:
                 return InMemoryTimeSeriesStorage()
+            case TimeSeriesStorageType.HDF5:
+                if not is_h5py_installed():
+                    msg = f"`{storage_type}` backend requires `h5py` to be installed. "
+                    msg += 'Install it using `pip install "infrasys[h5]"`.'
+                    raise ImportError(msg)
+                return HDF5TimeSeriesStorage(base_directory, **kwargs)
             case _:
                 msg = f"{storage_type=}"
                 raise NotImplementedError(msg)
@@ -123,8 +180,8 @@ def add(
         self,
         time_series: TimeSeriesData,
         *owners: Component | SupplementalAttribute,
-        connection: DatabaseConnection | None = None,
-        **user_attributes: Any,
+        context: TimeSeriesStorageContext | None = None,
+        **features: Any,
     ) -> TimeSeriesKey:
         """Store a time series array for one or more components or supplemental attributes.
 
@@ -136,7 +193,7 @@ def add(
             Add the time series to all of these components or supplemental attributes.
-        connection
-            Optional connection to use for the operation.
+        context : TimeSeriesStorageContext | None
+            Optional context to use for the operation.
-        user_attributes : Any
+        features : Any
             Key/value pairs to store with the time series data. Must be JSON-serializable.
 
         Raises
@@ -148,6 +205,7 @@ def add(
             Raised if the manager was created in read-only mode.
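+
+        Examples
+        --------
+        >>> key = manager.add(ts, gen1, scenario="high")  # "scenario" is an illustrative feature key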
""" self._handle_read_only() + context = context or self._context if not owners: msg = "add_time_series requires at least one component or supplemental attribute" raise ISOperationNotAllowed(msg) @@ -157,30 +215,28 @@ def add( msg = f"The first argument must be an instance of TimeSeriesData: {ts_type}" raise ValueError(msg) metadata_type = ts_type.get_time_series_metadata_type() - metadata = metadata_type.from_data(time_series, **user_attributes) + metadata = metadata_type.from_data(time_series, **features) data_is_stored = self._metadata_store.has_time_series(time_series.uuid) # Call this first because it could raise an exception. - self._metadata_store.add( - metadata, *owners, connection=_get_metadata_connection(connection) - ) + self._metadata_store.add(metadata, *owners, connection=_get_metadata_connection(context)) if not data_is_stored: self._storage.add_time_series( metadata, time_series, - connection=_get_data_connection(connection), + context=_get_data_context(context), ) return make_time_series_key(metadata) def get( self, owner: Component | SupplementalAttribute, - variable_name: str | None = None, + name: str | None = None, time_series_type: Type[TimeSeriesData] | None = None, start_time: datetime | None = None, length: int | None = None, - connection: DatabaseConnection | None = None, - **user_attributes, + context: TimeSeriesStorageContext | None = None, + **features, ) -> TimeSeriesData: """Return a time series array. @@ -198,105 +254,103 @@ def get( """ metadata = self._metadata_store.get_metadata( owner, - variable_name=variable_name, + name=name, time_series_type=time_series_type.__name__ if time_series_type else None, - **user_attributes, + **features, ) return self._get_by_metadata( - metadata, start_time=start_time, length=length, connection=connection + metadata, start_time=start_time, length=length, context=context ) def get_by_key( self, owner: Component | SupplementalAttribute, key: TimeSeriesKey, - connection: DatabaseConnection | None = None, + connection: TimeSeriesStorageContext | None = None, ) -> TimeSeriesData: """Return a time series array by key.""" metadata = self._metadata_store.get_metadata( owner, - variable_name=key.variable_name, + name=key.name, time_series_type=key.time_series_type.__name__, - **key.user_attributes, + **key.features, ) - return self._get_by_metadata(metadata, connection=connection) + return self._get_by_metadata(metadata, context=connection) def has_time_series( self, owner: Component | SupplementalAttribute, - variable_name: str | None = None, - time_series_type: Type[TimeSeriesData] | None = None, - **user_attributes, + name: str | None = None, + time_series_type: Type[TimeSeriesData] = SingleTimeSeries, + **features, ) -> bool: """Return True if the component or supplemental atttribute has time series matching the inputs. 
""" return self._metadata_store.has_time_series_metadata( owner, - variable_name=variable_name, - time_series_type=time_series_type.__name__ if time_series_type else None, - **user_attributes, + name=name, + time_series_type=time_series_type.__name__, + **features, ) def list_time_series( self, owner: Component | SupplementalAttribute, - variable_name: str | None = None, - time_series_type: Type[TimeSeriesData] | None = None, + name: str | None = None, + time_series_type: Type[TimeSeriesData] = SingleTimeSeries, start_time: datetime | None = None, length: int | None = None, - connection: DatabaseConnection | None = None, - **user_attributes: Any, + connection: TimeSeriesStorageContext | None = None, + **features: Any, ) -> list[TimeSeriesData]: """Return all time series that match the inputs.""" metadata = self.list_time_series_metadata( owner, - variable_name=variable_name, + name=name, time_series_type=time_series_type, - **user_attributes, + **features, ) return [ - self._get_by_metadata(x, start_time=start_time, length=length, connection=connection) + self._get_by_metadata(x, start_time=start_time, length=length, context=connection) for x in metadata ] def list_time_series_keys( self, owner: Component | SupplementalAttribute, - variable_name: str | None = None, - time_series_type: Type[TimeSeriesData] | None = None, - **user_attributes: Any, + name: str | None = None, + time_series_type: Type[TimeSeriesData] = SingleTimeSeries, + **features: Any, ) -> list[TimeSeriesKey]: """Return all time series keys that match the inputs.""" return [ make_time_series_key(x) - for x in self.list_time_series_metadata( - owner, variable_name, time_series_type, **user_attributes - ) + for x in self.list_time_series_metadata(owner, name, time_series_type, **features) ] def list_time_series_metadata( self, owner: Component | SupplementalAttribute, - variable_name: str | None = None, - time_series_type: Type[TimeSeriesData] | None = None, - **user_attributes: Any, + name: str | None = None, + time_series_type: Type[TimeSeriesData] = SingleTimeSeries, + **features: Any, ) -> list[TimeSeriesMetadata]: """Return all time series metadata that match the inputs.""" return self._metadata_store.list_metadata( owner, - variable_name=variable_name, - time_series_type=time_series_type.__name__ if time_series_type else None, - **user_attributes, + name=name, + time_series_type=time_series_type.__name__, + **features, ) def remove( self, *owners: Component | SupplementalAttribute, - variable_name: str | None = None, - time_series_type: Type[TimeSeriesData] | None = None, - connection: DatabaseConnection | None = None, - **user_attributes: Any, + name: str | None = None, + time_series_type: Type[TimeSeriesData] = SingleTimeSeries, + context: TimeSeriesStorageContext | None = None, + **features: Any, ): """Remove all time series arrays matching the inputs. 
 
@@ -310,18 +364,16 @@ def remove(
         self._handle_read_only()
         metadata = self._metadata_store.remove(
             *owners,
-            variable_name=variable_name,
-            time_series_type=time_series_type.__name__ if time_series_type else None,
-            connection=_get_metadata_connection(connection),
-            **user_attributes,
+            name=name,
+            time_series_type=time_series_type.__name__,
+            connection=_get_metadata_connection(context),
+            **features,
         )
         time_series = {x.time_series_uuid: x for x in metadata}
         missing_uuids = self._metadata_store.list_missing_time_series(time_series.keys())
         for uuid in missing_uuids:
-            self._storage.remove_time_series(
-                time_series[uuid], connection=_get_data_connection(connection)
-            )
-            logger.info("Removed time series {}", uuid)
+            self._storage.remove_time_series(time_series[uuid], context=_get_data_context(context))
+            logger.info("Removed time series {}.{}", time_series_type.__name__, name)
 
     def copy(
         self,
@@ -355,20 +407,24 @@ def _get_by_metadata(
         metadata: TimeSeriesMetadata,
         start_time: datetime | None = None,
         length: int | None = None,
-        connection: DatabaseConnection | None = None,
+        context: TimeSeriesStorageContext | None = None,
     ) -> TimeSeriesData:
         return self._storage.get_time_series(
             metadata,
             start_time=start_time,
             length=length,
-            connection=_get_data_connection(connection),
+            context=_get_data_context(context),
         )
 
     def serialize(
-        self, data: dict[str, Any], dst: Path | str, src: Optional[Path | str] = None
+        self,
+        data: dict[str, Any],
+        dst: Path | str,
+        db_name: str,
+        src: Path | str | None = None,
     ) -> None:
         """Serialize the time series data to dst."""
-        if isinstance(self._storage, InMemoryTimeSeriesStorage):
+        if isinstance(self.storage, InMemoryTimeSeriesStorage):
             new_storage = self.convert_storage(
                 time_series_storage_type=TimeSeriesStorageType.ARROW,
                 time_series_directory=dst,
@@ -377,7 +433,22 @@ def serialize(
             )
             assert isinstance(new_storage, ArrowTimeSeriesStorage)
             new_storage.add_serialized_data(data)
+            self._metadata_store.serialize(Path(dst) / db_name)
+        elif isinstance(self.storage, HDF5TimeSeriesStorage):
+            self.storage.serialize(data, dst, src=src)
+            with tempfile.TemporaryDirectory() as tmpdirname:
+                temp_file_path = Path(tmpdirname) / db_name
+                self._metadata_store.serialize(temp_file_path)
+                with open(temp_file_path, "rb") as f:
+                    binary_data = f.read()
+                with h5py.File(str(self.storage.output_file), "a") as f_out:
+                    f_out.create_dataset(
+                        self.storage.HDF5_TS_METADATA_ROOT_PATH,
+                        data=np.frombuffer(binary_data, dtype=np.uint8),
+                        dtype=np.uint8,
+                    )
         else:
+            self._metadata_store.serialize(Path(dst) / db_name)
             self._storage.serialize(data, dst, src=src)
 
     @classmethod
@@ -404,44 +475,44 @@ def deserialize(
             raise FileNotFoundError(msg)
         read_only = _process_time_series_kwarg("time_series_read_only", **kwargs)
         time_series_dir = Path(parent_dir) / data["directory"]
-        storage: TimeSeriesStorageBase
 
         # This term was introduced in v0.3.0. Maintain compatibility with old serialized files.
         ts_type = data.get("time_series_storage_type", TimeSeriesStorageType.ARROW)
-        match ts_type:
-            case TimeSeriesStorageType.CHRONIFY:
-                if not is_chronify_installed:
-                    msg = (
-                        "This system used chronify to manage time series data but the package is "
-                        'not installed. Please install it with `pip install "infrasys[chronify]"`.'
     @classmethod
@@ -404,44 +475,44 @@ def deserialize(
             raise FileNotFoundError(msg)
         read_only = _process_time_series_kwarg("time_series_read_only", **kwargs)
         time_series_dir = Path(parent_dir) / data["directory"]
-        storage: TimeSeriesStorageBase
         # This term was introduced in v0.3.0. Maintain compatibility with old serialized files.
         ts_type = data.get("time_series_storage_type", TimeSeriesStorageType.ARROW)
-        match ts_type:
-            case TimeSeriesStorageType.CHRONIFY:
-                if not is_chronify_installed:
-                    msg = (
-                        "This system used chronify to manage time series data but the package is "
-                        'not installed. Please install it with `pip install "infrasys[chronify]"`.'
-                    )
-                    raise ImportError(msg)
-                if read_only:
-                    storage = ChronifyTimeSeriesStorage.from_file(
-                        data,
-                        read_only=True,
-                    )
-                else:
-                    storage = ChronifyTimeSeriesStorage.from_file_to_tmp_file(
-                        data,
-                        dst_dir=dst_time_series_directory,
-                        read_only=read_only,
-                    )
-            case TimeSeriesStorageType.ARROW:
-                if read_only:
-                    storage = ArrowTimeSeriesStorage.create_with_permanent_directory(
-                        time_series_dir
-                    )
-                else:
-                    storage = ArrowTimeSeriesStorage.create_with_temp_directory(
-                        base_directory=dst_time_series_directory
-                    )
-                storage.serialize({}, storage.get_time_series_directory(), src=time_series_dir)
-            case _:
-                msg = f"time_series_storage_type={ts_type} is not supported"
-                raise NotImplementedError(msg)
-        mgr = cls(con, storage=storage, initialize=False, **kwargs)
+        storage_class = TIME_SERIES_REGISTRY.get(ts_type)
+        if storage_class is None:
+            if ts_type == TimeSeriesStorageType.CHRONIFY and not is_chronify_installed:
+                msg = (
+                    "This system used chronify to manage time series data but the package is "
+                    'not installed. Please install it with `pip install "infrasys[chronify]"`.'
+                )
+                raise ImportError(msg)
+
+            msg = f"time_series_storage_type={ts_type} is not supported"
+            raise NotImplementedError(msg)
+
+        storage, metadata_store = storage_class.deserialize(
+            data=data,
+            time_series_dir=time_series_dir,
+            dst_time_series_directory=dst_time_series_directory,
+            read_only=read_only,
+            **kwargs,
+        )
+
+        if metadata_store is None or not has_table(
+            metadata_store._con, TIME_SERIES_ASSOCIATIONS_TABLE
+        ):
+            logger.warning(
+                "Time series metadata store is missing table {}; using the restored metadata database.",
+                TIME_SERIES_ASSOCIATIONS_TABLE,
+            )
+            metadata_store = TimeSeriesMetadataStore(con, initialize=False)
+
+        # Create the manager instance
+        mgr = cls(con, storage=storage, metadata_store=metadata_store, initialize=False, **kwargs)
+
+        # Load metadata and handle storage conversion if requested
+        mgr.metadata_store._load_metadata_into_memory()
         if (
             "time_series_storage_type" in kwargs
             and _process_time_series_kwarg("time_series_storage_type", **kwargs) != ts_type
@@ -450,15 +521,33 @@ def deserialize(
         return mgr

     @contextmanager
-    def open_time_series_store(self) -> Generator[DatabaseConnection, None, None]:
+    def open_time_series_store(
+        self, mode: Literal["r", "r+", "a", "w", "w-"] = "a"
+    ) -> Generator[TimeSeriesStorageContext, None, None]:
         """Open a connection to the time series metadata and data stores."""
-        with self._storage.open_time_series_store() as data_conn:
+        with self.storage.open_time_series_store(mode=mode) as context:
             try:
-                yield DatabaseConnection(metadata_conn=self._con, data_conn=data_conn)
+                original_uuids = self._metadata_store.list_existing_time_series_uuids()
+                self._context = TimeSeriesStorageContext(
+                    metadata_conn=self._con, data_context=context
+                )
+                yield self._context
                 self._con.commit()
-            except Exception:
+            except Exception as e:
+                # If we fail, remove any newly added time series and roll back the metadata.
+ logger.error(e) + new_uuids = ( + set(self._metadata_store.list_existing_time_series_uuids()) - original_uuids + ) + for uuid in new_uuids: + metadata_list = self._metadata_store.list_metadata_with_time_series_uuid(uuid) + for metadata in metadata_list: + self._storage.remove_time_series(metadata, context=context) + self._metadata_store.remove_by_metadata(metadata, connection=self._con) self._con.rollback() raise + finally: + self._context = None def _handle_read_only(self) -> None: if self._read_only: @@ -509,11 +598,11 @@ def make_time_series_key(metadata) -> TimeSeriesKey: @make_time_series_key.register(SingleTimeSeriesMetadata) def _(metadata: SingleTimeSeriesMetadata) -> TimeSeriesKey: return SingleTimeSeriesKey( - initial_time=metadata.initial_time, + initial_timestamp=metadata.initial_timestamp, resolution=metadata.resolution, length=metadata.length, - user_attributes=metadata.user_attributes, - variable_name=metadata.variable_name, + features=metadata.features, + name=metadata.name, time_series_type=SingleTimeSeries, ) @@ -522,15 +611,29 @@ def _(metadata: SingleTimeSeriesMetadata) -> TimeSeriesKey: def _(metadata: NonSequentialTimeSeriesMetadata) -> TimeSeriesKey: return NonSequentialTimeSeriesKey( length=metadata.length, - user_attributes=metadata.user_attributes, - variable_name=metadata.variable_name, + features=metadata.features, + name=metadata.name, time_series_type=NonSequentialTimeSeries, ) -def _get_data_connection(conn: DatabaseConnection | None) -> Any: - return None if conn is None else conn.data_conn +@make_time_series_key.register(DeterministicMetadata) +def _(metadata: DeterministicMetadata) -> TimeSeriesKey: + return DeterministicTimeSeriesKey( + initial_timestamp=metadata.initial_timestamp, + resolution=metadata.resolution, + interval=metadata.interval, + horizon=metadata.horizon, + window_count=metadata.window_count, + features=metadata.features, + name=metadata.name, + time_series_type=metadata.get_time_series_data_type(), + ) + + +def _get_data_context(conn: TimeSeriesStorageContext | None) -> Any: + return None if conn is None else conn.data_context -def _get_metadata_connection(conn: DatabaseConnection | None) -> sqlite3.Connection | None: +def _get_metadata_connection(conn: TimeSeriesStorageContext | None) -> sqlite3.Connection | None: return None if conn is None else conn.metadata_conn diff --git a/src/infrasys/time_series_metadata_store.py b/src/infrasys/time_series_metadata_store.py index 9375af4..876f164 100644 --- a/src/infrasys/time_series_metadata_store.py +++ b/src/infrasys/time_series_metadata_store.py @@ -1,76 +1,66 @@ """Stores time series metadata in a SQLite database.""" -import hashlib import itertools import json import sqlite3 from dataclasses import dataclass +from pathlib import Path from typing import Any, Iterable, Optional, Sequence from uuid import UUID -from loguru import logger +import orjson -from infrasys.exceptions import ISAlreadyAttached, ISOperationNotAllowed, ISNotStored -from infrasys import Component -from infrasys.supplemental_attribute_manager import SupplementalAttribute -from infrasys.serialization import ( - deserialize_value, - serialize_value, +from infrasys.utils.sqlite import backup, execute + +from . 
import (
    TIME_SERIES_ASSOCIATIONS_TABLE,
    Component,
)
from .exceptions import ISAlreadyAttached, ISNotStored, ISOperationNotAllowed
from .serialization import (
    SerializedTypeMetadata,
-    TYPE_METADATA,
+    deserialize_type,
+    serialize_value,
)
-from infrasys.time_series_models import (
+from .supplemental_attribute_manager import SupplementalAttribute
+from .time_series_models import (
     TimeSeriesMetadata,
-    SingleTimeSeriesMetadataBase,
-    NonSequentialTimeSeriesMetadataBase,
 )
-from infrasys.utils.sqlite import execute
+from .utils.metadata_utils import (
+    create_associations_table,
+    create_key_value_store,
+    get_horizon,
+    get_initial_timestamp,
+    get_interval,
+    get_resolution,
+    get_window_count,
+)


 class TimeSeriesMetadataStore:
     """Stores time series metadata in a SQLite database."""

-    TABLE_NAME = "time_series_metadata"
-
     def __init__(self, con: sqlite3.Connection, initialize: bool = True):
         self._con = con
         if initialize:
-            self._create_metadata_table()
-
-    def _create_metadata_table(self):
-        schema = [
-            "id INTEGER PRIMARY KEY",
-            "time_series_uuid TEXT",
-            "time_series_type TEXT",
-            "initial_time TEXT",
-            "resolution TEXT",
-            "variable_name TEXT",
-            "component_uuid TEXT",
-            "component_type TEXT",
-            "user_attributes_hash TEXT",
-            "metadata JSON",
-        ]
-        schema_text = ",".join(schema)
-        cur = self._con.cursor()
-        execute(cur, f"CREATE TABLE {self.TABLE_NAME}({schema_text})")
-        self._create_indexes(cur)
-        self._con.commit()
-        logger.debug("Created in-memory time series metadata table")
-
-    def _create_indexes(self, cur) -> None:
-        # Index strategy:
-        # 1. Optimize for these user queries with indexes:
-        #    1a. all time series attached to one component
-        #    1b. time series for one component + variable_name + type
-        #    1c. time series for one component with all user attributes
-        # 2. Optimize for checks at system.add_time_series. Use all fields and attribute hash.
-        # 3. Optimize for returning all metadata for a time series UUID.
-        execute(
-            cur,
-            f"CREATE INDEX by_c_vn_tst_hash ON {self.TABLE_NAME} "
-            f"(component_uuid, variable_name, time_series_type, user_attributes_hash)",
-        )
-        execute(cur, f"CREATE INDEX by_ts_uuid ON {self.TABLE_NAME} (time_series_uuid)")
+            assert create_associations_table(connection=self._con)
+            create_key_value_store(connection=self._con)
+        self._cache_metadata: dict[UUID, TimeSeriesMetadata] = {}
+
+    def _load_metadata_into_memory(self):
+        query = f"SELECT * FROM {TIME_SERIES_ASSOCIATIONS_TABLE}"
+        cursor = self._con.cursor()
+        cursor.execute(query)
+        rows = cursor.fetchall()
+        columns = [desc[0] for desc in cursor.description]
+        rows = [dict(zip(columns, row)) for row in rows]
+        for row in rows:
+            assert "features" in row, (
+                f"Bug: Features missing from {TIME_SERIES_ASSOCIATIONS_TABLE} table."
+            )
+            metadata = _deserialize_time_series_metadata(row)
+            self._cache_metadata[metadata.uuid] = metadata
+        return
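
`_load_metadata_into_memory` materializes each SQLite row as a dict keyed by column name before deserializing it into the in-memory cache. A self-contained sketch of that cursor idiom:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE t(a TEXT, b INTEGER)")
con.execute("INSERT INTO t VALUES('x', 1)")
cursor = con.cursor()
cursor.execute("SELECT * FROM t")
# cursor.description holds one 7-tuple per column; index 0 is the column name.
columns = [desc[0] for desc in cursor.description]
rows = [dict(zip(columns, row)) for row in cursor.fetchall()]
assert rows == [{"a": "x", "b": 1}]
```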

     def add(
         self,
@@ -85,77 +75,79 @@ def add(
         ISAlreadyAttached
             Raised if the time series metadata is already stored.
         """
-        attribute_hash = _compute_user_attribute_hash(metadata.user_attributes)
         where_clause, params = self._make_where_clause(
             owners,
-            metadata.variable_name,
+            metadata.name,
             metadata.type,
-            attribute_hash=attribute_hash,
-            **metadata.user_attributes,
+            **metadata.features,
         )
-        for owner in owners:
-            if isinstance(owner, SupplementalAttribute):
-                # This restriction can be removed when we migrate the database schema to be
-                # equivalent with Sienna.
-                msg = "Adding time series to a supplemental attribute is not supported yet"
-                raise ISOperationNotAllowed(msg)
         con = connection or self._con
         cur = con.cursor()
-        query = f"SELECT COUNT(*) FROM {self.TABLE_NAME} WHERE {where_clause}"
+        query = f"SELECT 1 FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE {where_clause}"
         res = execute(cur, query, params=params).fetchone()
-        if res[0] > 0:
+        if res:
             msg = f"Time series with {metadata=} is already stored."
             raise ISAlreadyAttached(msg)

-        if isinstance(metadata, SingleTimeSeriesMetadataBase):
-            resolution = str(metadata.resolution)
-            initial_time = str(metadata.initial_time)
-        elif isinstance(metadata, NonSequentialTimeSeriesMetadataBase):
-            resolution = None
-            initial_time = None
-        else:
-            raise NotImplementedError
+        # Will probably need to refactor if we introduce more metadata classes.
+        resolution = get_resolution(metadata)
+        initial_time = get_initial_timestamp(metadata)
+        horizon = get_horizon(metadata)
+        interval = get_interval(metadata)
+        window_count = get_window_count(metadata)
+
+        units = None
+        if metadata.units:
+            units = orjson.dumps(serialize_value(metadata.units)).decode()

         rows = [
-            (
-                None,  # auto-assigned by sqlite
-                str(metadata.time_series_uuid),
-                metadata.type,
-                initial_time,
-                resolution,
-                metadata.variable_name,
-                str(owner.uuid),
-                owner.__class__.__name__,
-                attribute_hash,
-                json.dumps(serialize_value(metadata)),
-            )
+            {
+                "time_series_uuid": str(metadata.time_series_uuid),
+                "time_series_type": metadata.type,
+                "initial_timestamp": initial_time,
+                "resolution": resolution,
+                "horizon": horizon,
+                "interval": interval,
+                "window_count": window_count,
+                "length": metadata.length if hasattr(metadata, "length") else None,
+                "name": metadata.name,
+                "owner_uuid": str(owner.uuid),
+                "owner_type": owner.__class__.__name__,
+                "owner_category": "Component",
+                "features": make_features_string(metadata.features),
+                "units": units,
+                "metadata_uuid": str(metadata.uuid),
+            }
             for owner in owners
         ]
        self._insert_rows(rows, cur)
         if connection is None:
             self._con.commit()
         # else, commit/rollback will occur at a higher level.
+
+        self._cache_metadata[metadata.uuid] = metadata
+        return

     def get_time_series_counts(self) -> "TimeSeriesCounts":
         """Return summary counts of components and time series."""
         query = f"""
         SELECT
-            component_type
+            owner_type
             ,time_series_type
-            ,initial_time
+            ,initial_timestamp
             ,resolution
             ,count(*) AS count
-        FROM {self.TABLE_NAME}
+        FROM {TIME_SERIES_ASSOCIATIONS_TABLE}
         GROUP BY
-            component_type
+            owner_type
             ,time_series_type
-            ,initial_time
+            ,initial_timestamp
             ,resolution
         ORDER BY
-            component_type
+            owner_type
             ,time_series_type
-            ,initial_time
+            ,initial_timestamp
             ,resolution
         """
         cur = self._con.cursor()
@@ -163,7 +155,7 @@ def get_time_series_counts(self) -> "TimeSeriesCounts":
         time_series_type_count = {(x[0], x[1], x[2], x[3]): x[4] for x in rows}

         time_series_count = execute(
-            cur, f"SELECT COUNT(DISTINCT time_series_uuid) from {self.TABLE_NAME}"
+            cur, f"SELECT COUNT(DISTINCT time_series_uuid) from {TIME_SERIES_ASSOCIATIONS_TABLE}"
         ).fetchall()[0][0]

         return TimeSeriesCounts(
@@ -173,10 +165,10 @@ def get_time_series_counts(self) -> "TimeSeriesCounts":

     def get_metadata(
         self,
-        component: Component | SupplementalAttribute,
-        variable_name: Optional[str] = None,
+        owner: Component | SupplementalAttribute,
+        name: Optional[str] = None,
         time_series_type: Optional[str] = None,
-        **user_attributes,
+        **features,
     ) -> TimeSeriesMetadata:
         """Return the metadata matching the inputs.
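
`add` builds association rows as dictionaries and leans on sqlite3's named-placeholder binding (used by `_insert_rows` later in this file). A self-contained sketch of the pattern, with an illustrative table:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE assoc(time_series_uuid TEXT, name TEXT, features TEXT)")
rows = [
    {"time_series_uuid": "u1", "name": "active_power", "features": '[{"scenario":"high"}]'},
    {"time_series_uuid": "u2", "name": "active_power", "features": "[]"},
]
# executemany binds each :field placeholder from the matching dict key.
con.executemany("INSERT INTO assoc VALUES(:time_series_uuid, :name, :features)", rows)
con.commit()
```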
@@ -185,18 +177,11 @@ def get_metadata(
         ISOperationNotAllowed
             Raised if more than one metadata instance matches the inputs.
         """
-        if variable_name is not None and time_series_type is not None:
-            metadata = self._try_get_time_series_metadata_by_full_params(
-                component, variable_name, time_series_type, **user_attributes
-            )
-            if metadata is not None:
-                return metadata
-
         metadata_list = self.list_metadata(
-            component,
-            variable_name=variable_name,
+            owner,
+            name=name,
             time_series_type=time_series_type,
-            **user_attributes,
+            **features,
         )
         if not metadata_list:
             msg = "No time series matching the inputs is stored"
@@ -211,66 +196,63 @@ def get_metadata(
     def has_time_series(self, time_series_uuid: UUID) -> bool:
         """Return True if there is time series matching the UUID."""
         cur = self._con.cursor()
-        query = f"SELECT COUNT(*) FROM {self.TABLE_NAME} WHERE time_series_uuid = ?"
+        query = f"SELECT 1 FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE time_series_uuid = ?"
         row = execute(cur, query, params=(str(time_series_uuid),)).fetchone()
-        return row[0] > 0
+        return row is not None

     def has_time_series_metadata(
         self,
-        component: Component | SupplementalAttribute,
-        variable_name: Optional[str] = None,
-        time_series_type: Optional[str] = None,
-        **user_attributes: Any,
+        owner: Component | SupplementalAttribute,
+        name: Optional[str] = None,
+        time_series_type: str | None = None,
+        **features: Any,
     ) -> bool:
         """Return True if there is time series metadata matching the inputs."""
-        if (
-            variable_name is not None
-            and time_series_type is not None
-            and self._try_has_time_series_metadata_by_full_params(
-                component, variable_name, time_series_type, **user_attributes
-            )
-        ):
-            return True
-
         where_clause, params = self._make_where_clause(
-            (component,), variable_name, time_series_type, **user_attributes
+            (owner,), name, time_series_type, **features
         )
-        query = f"SELECT COUNT(*) FROM {self.TABLE_NAME} WHERE {where_clause}"
+        query = f"SELECT 1 FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE {where_clause}"
         cur = self._con.cursor()
         res = execute(cur, query, params=params).fetchone()
-        return res[0] > 0
+        return bool(res)

     def list_existing_time_series(self, time_series_uuids: Iterable[UUID]) -> set[UUID]:
-        """Return the UUIDs that are present."""
+        """Return the UUIDs that are present in the database with at least one reference."""
         cur = self._con.cursor()
         params = tuple(str(x) for x in time_series_uuids)
+        if not params:
+            return set()
         uuids = ",".join(itertools.repeat("?", len(params)))
-        query = (
-            f"SELECT time_series_uuid FROM {self.TABLE_NAME} WHERE time_series_uuid IN ({uuids})"
-        )
+        query = f"SELECT DISTINCT time_series_uuid FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE time_series_uuid IN ({uuids})"
         rows = execute(cur, query, params=params).fetchall()
         return {UUID(x[0]) for x in rows}

+    def list_existing_time_series_uuids(self) -> set[UUID]:
+        """Return the UUIDs that are present."""
+        cur = self._con.cursor()
+        query = f"SELECT DISTINCT time_series_uuid FROM {TIME_SERIES_ASSOCIATIONS_TABLE}"
+        rows = execute(cur, query).fetchall()
+        return {UUID(x[0]) for x in rows}
+
     def list_missing_time_series(self, time_series_uuids: Iterable[UUID]) -> set[UUID]:
-        """Return the UUIDs that are not present."""
-        existing_uuids = set(self.list_existing_time_series(time_series_uuids))
+        """Return the time_series_uuids that are no longer referenced by any owner."""
+        existing_uuids = self.list_existing_time_series(time_series_uuids)
         return set(time_series_uuids) - existing_uuids

     def list_metadata(
         self,
         *owners: Component | 
SupplementalAttribute, - variable_name: Optional[str] = None, + name: Optional[str] = None, time_series_type: str | None = None, - **user_attributes, + **features, ) -> list[TimeSeriesMetadata]: """Return a list of metadata that match the query.""" - where_clause, params = self._make_where_clause( - owners, variable_name, time_series_type, **user_attributes + metadata_uuids = self._get_metadata_uuids_by_filter( + owners, name, time_series_type, **features ) - query = f"SELECT metadata FROM {self.TABLE_NAME} WHERE {where_clause}" - cur = self._con.cursor() - rows = execute(cur, query, params=params).fetchall() - return [_deserialize_time_series_metadata(x[0]) for x in rows] + return [ + self._cache_metadata[uuid] for uuid in metadata_uuids if uuid in self._cache_metadata + ] def list_metadata_with_time_series_uuid( self, time_series_uuid: UUID, limit: int | None = None @@ -286,68 +268,122 @@ def list_metadata_with_time_series_uuid( """ params = (str(time_series_uuid),) limit_str = "" if limit is None else f"LIMIT {limit}" - query = f"SELECT metadata FROM {self.TABLE_NAME} WHERE time_series_uuid = ? {limit_str}" + # Use the denormalized view + query = f""" + SELECT + metadata_uuid + FROM {TIME_SERIES_ASSOCIATIONS_TABLE} + WHERE + time_series_uuid = ? {limit_str} + """ cur = self._con.cursor() rows = execute(cur, query, params=params).fetchall() - return [_deserialize_time_series_metadata(x[0]) for x in rows] + return [ + self._cache_metadata[UUID(x[0])] for x in rows if UUID(x[0]) in self._cache_metadata + ] def list_rows( self, *components: Component | SupplementalAttribute, - variable_name: Optional[str] = None, - time_series_type: Optional[str] = None, + name: Optional[str] = None, + time_series_type: str | None = None, columns=None, - **user_attributes, + **features, ) -> list[tuple]: """Return a list of rows that match the query.""" where_clause, params = self._make_where_clause( - components, variable_name, time_series_type, **user_attributes + components, name, time_series_type, **features ) cols = "*" if columns is None else ",".join(columns) - query = f"SELECT {cols} FROM {self.TABLE_NAME} WHERE {where_clause}" + query = f"SELECT {cols} FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE {where_clause}" cur = self._con.cursor() rows = execute(cur, query, params=params).fetchall() return rows def remove( self, - *components: Component | SupplementalAttribute, - variable_name: str | None = None, + *owners: Component | SupplementalAttribute, + name: str | None = None, time_series_type: str | None = None, connection: sqlite3.Connection | None = None, - **user_attributes, + **features, ) -> list[TimeSeriesMetadata]: """Remove all matching rows and return the metadata.""" con = connection or self._con cur = con.cursor() - where_clause, params = self._make_where_clause( - components, variable_name, time_series_type, **user_attributes + where_clause, params = self._make_where_clause(owners, name, time_series_type, **features) + + query = ( + f"SELECT metadata_uuid FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE ({where_clause})" ) - query = f"SELECT metadata FROM {self.TABLE_NAME} WHERE {where_clause}" rows = execute(cur, query, params=params).fetchall() - metadata = [_deserialize_time_series_metadata(x[0]) for x in rows] - if not metadata: + matches = len(rows) + if not matches: msg = "No metadata matching the inputs is stored" raise ISNotStored(msg) - query = f"DELETE FROM {self.TABLE_NAME} WHERE ({where_clause})" + query = f"DELETE FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE ({where_clause})" 
execute(cur, query, params=params) if connection is None: - self._con.commit() + con.commit() count_deleted = execute(cur, "SELECT changes()").fetchall()[0][0] - if len(metadata) != count_deleted: - msg = f"Bug: Unexpected length mismatch: {len(metadata)=} {count_deleted=}" + if matches != count_deleted: + msg = f"Bug: Unexpected length mismatch: {matches=} {count_deleted=}" raise Exception(msg) - return metadata + + unique_metadata_uuids = {UUID(row[0]) for row in rows} + result: list[TimeSeriesMetadata] = [] + for metadata_uuid in unique_metadata_uuids: + query_count = ( + f"SELECT COUNT(*) FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE metadata_uuid = ?" + ) + count_association = execute(cur, query_count, params=[str(metadata_uuid)]).fetchone()[ + 0 + ] + if count_association == 0: + result.append(self._cache_metadata.pop(metadata_uuid)) + else: + result.append(self._cache_metadata[metadata_uuid]) + return result + + def remove_by_metadata( + self, + metadata: TimeSeriesMetadata, + connection: sqlite3.Connection | None = None, + ) -> TimeSeriesMetadata: + """Remove all associations for a given metadata and return the metadata.""" + con = connection or self._con + cur = con.cursor() + + query = f"DELETE FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE metadata_uuid = ?" + cur.execute(query, (str(metadata.uuid),)) + + if connection is None: + con.commit() + + if metadata.uuid in self._cache_metadata: + return self._cache_metadata.pop(metadata.uuid) + else: + return metadata def sql(self, query: str, params: Sequence[str] = ()) -> list[tuple]: """Run a SQL query on the time series metadata table.""" cur = self._con.cursor() return execute(cur, query, params=params).fetchall() - def _insert_rows(self, rows: list[tuple], cur: sqlite3.Cursor) -> None: - placeholder = ",".join(["?"] * len(rows[0])) - query = f"INSERT INTO {self.TABLE_NAME} VALUES({placeholder})" + def _insert_rows(self, rows: list[dict], cur: sqlite3.Cursor) -> None: + query = f""" + INSERT INTO {TIME_SERIES_ASSOCIATIONS_TABLE} ( + time_series_uuid, time_series_type, initial_timestamp, resolution, + horizon, interval, window_count, length, name, owner_uuid, + owner_type, owner_category, features, units, metadata_uuid + ) VALUES ( + :time_series_uuid, :time_series_type, :initial_timestamp, + :resolution, :horizon, :interval, :window_count, :length, :name, + :owner_uuid, :owner_type, :owner_category, :features, :units, + :metadata_uuid + ) + """ cur.executemany(query, rows) def _make_components_str( @@ -357,7 +393,7 @@ def _make_components_str( msg = "At least one component must be passed." raise ISOperationNotAllowed(msg) - or_clause = "OR ".join((itertools.repeat("component_uuid = ? ", len(owners)))) + or_clause = "OR ".join((itertools.repeat("owner_uuid = ? ", len(owners)))) for owner in owners: params.append(str(owner.uuid)) @@ -367,19 +403,18 @@ def _make_components_str( def _make_where_clause( self, owners: tuple[Component | SupplementalAttribute, ...], - variable_name: Optional[str], - time_series_type: Optional[str], - attribute_hash: Optional[str] = None, - **user_attributes: str, + name: str | None, + time_series_type: str | None, + **features: str, ) -> tuple[str, list[str]]: params: list[str] = [] component_str = self._make_components_str(params, *owners) - if variable_name is None: + if name is None: var_str = "" else: - var_str = "AND variable_name = ?" - params.append(variable_name) + var_str = "AND name = ?" 
+ params.append(name) if time_series_type is None: ts_str = "" @@ -387,101 +422,59 @@ def _make_where_clause( ts_str = "AND time_series_type = ?" params.append(time_series_type) - if attribute_hash is None and user_attributes: - ua_hash_filter = _make_user_attribute_filter(user_attributes, params) - ua_str = f"AND {ua_hash_filter}" + if features: + feat_filter = _make_features_filter(features, params) + feat_str = f"AND {feat_filter}" else: - ua_str = "" + feat_str = "" - if attribute_hash: - ua_hash_filter = _make_user_attribute_hash_filter(attribute_hash, params) - ua_hash = f"AND {ua_hash_filter}" - else: - ua_hash = "" + return f"({component_str} {var_str} {ts_str}) {feat_str}", params - return f"({component_str} {var_str} {ts_str}) {ua_str} {ua_hash}", params + def unique_uuids_by_type(self, time_series_type: str): + query = f"SELECT DISTINCT time_series_uuid from {TIME_SERIES_ASSOCIATIONS_TABLE} where time_series_type = ?" + params = (time_series_type,) + uuid_strings = self.sql(query, params) + return [UUID(ustr[0]) for ustr in uuid_strings] - def _try_time_series_metadata_by_full_params( + def serialize(self, filename: Path | str) -> None: + """Serialize SQLite to file.""" + with sqlite3.connect(filename) as dst_con: + self._con.backup(dst_con) + cur = dst_con.cursor() + # Drop all index from the database that were created manually (sql not null) + index_to_drop = execute( + cur, "SELECT name FROM sqlite_master WHERE type ='index' AND sql IS NOT NULL" + ).fetchall() + for index in index_to_drop: + execute(cur, f"DROP INDEX {index[0]}") + dst_con.close() + backup(self._con, filename) + return + + def _get_metadata_uuids_by_filter( self, - owner: Component | SupplementalAttribute, - variable_name: str, - time_series_type: str, - column: str, - **user_attributes: str, - ) -> list[tuple] | None: - assert variable_name is not None - assert time_series_type is not None - where_clause, params = self._make_where_clause( - (owner,), - variable_name, - time_series_type, - attribute_hash=_compute_user_attribute_hash(user_attributes), - **user_attributes, - ) - query = f"SELECT {column} FROM {self.TABLE_NAME} WHERE {where_clause}" + owners: tuple[Component | SupplementalAttribute, ...], + name: Optional[str] = None, + time_series_type: str | None = None, + **features: Any, + ) -> list[UUID]: + """Get metadata UUIDs that match the filter criteria using progressive filtering.""" cur = self._con.cursor() - rows = execute(cur, query, params=params).fetchall() - if not rows: - return None - - return rows - def _try_get_time_series_metadata_by_full_params( - self, - owner: Component | SupplementalAttribute, - variable_name: str, - time_series_type: str, - **user_attributes: str, - ) -> TimeSeriesMetadata | None: - """Attempt to get the metadata by using all parameters. - - This will return the metadata if the user passes all user attributes that exist in the - time series metadata. This is highly advantageous in cases where one component has a large - number of time series and each metadata has user attributes. Otherwise, SQLite has to - parse the JSON values. 
-        """
-        rows = self._try_time_series_metadata_by_full_params(
-            owner,
-            variable_name,
-            time_series_type,
-            "metadata",
-            **user_attributes,
-        )
-        if rows is None:
-            return rows
-
-        if len(rows) > 1:
-            msg = f"Found more than one metadata matching inputs: {len(rows)}"
-            raise ISOperationNotAllowed(msg)
-
-        return _deserialize_time_series_metadata(rows[0][0])
+        where_clause, params = self._make_where_clause(owners, name, time_series_type)
+        if features:
+            # Fast path: exact match on the serialized features string.
+            params.append(make_features_string(features))
+            query = f"SELECT metadata_uuid FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE {where_clause} AND features = ?"
+            rows = execute(cur, query, params=params).fetchall()

-    def _try_has_time_series_metadata_by_full_params(
-        self,
-        owner: Component | SupplementalAttribute,
-        variable_name: str,
-        time_series_type: str,
-        **user_attributes: str,
-    ) -> bool:
-        """Attempt to check if the metadata is stored by using all parameters. Refer to
-        _try_get_time_series_metadata_by_full_params for more information.
-        """
-        text = self._try_time_series_metadata_by_full_params(
-            owner,
-            variable_name,
-            time_series_type,
-            "id",
-            **user_attributes,
-        )
-        return text is not None
+            if rows:
+                return [UUID(row[0]) for row in rows]

-    def unique_uuids_by_type(self, time_series_type: str):
-        query = (
-            f"SELECT DISTINCT time_series_uuid from {self.TABLE_NAME} where time_series_type = ?"
-        )
-        params = (time_series_type,)
-        uuid_strings = self.sql(query, params)
-        return [UUID(ustr[0]) for ustr in uuid_strings]
+        # Slow path: per-feature LIKE filters built by _make_where_clause.
+        where_clause, params = self._make_where_clause(owners, name, time_series_type, **features)
+        query = f"SELECT metadata_uuid FROM {TIME_SERIES_ASSOCIATIONS_TABLE} WHERE {where_clause}"
+        rows = execute(cur, query, params=params).fetchall()
+        return [UUID(row[0]) for row in rows]


 @dataclass
@@ -493,40 +486,59 @@ class TimeSeriesCounts:
     time_series_type_count: dict[tuple[str, str, str, str], int]


-def _make_user_attribute_filter(user_attributes: dict[str, Any], params: list[str]) -> str:
-    attrs = _make_user_attribute_dict(user_attributes)
-    items = []
-    for key, val in attrs.items():
-        items.append(f"metadata->>'$.user_attributes.{key}' = ? ")
-        params.append(val)
-    return "AND ".join(items)
-
-
-def _make_user_attribute_hash_filter(attribute_hash: str, params: list[str]) -> str:
-    params.append(attribute_hash)
-    return "user_attributes_hash = ?"
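
The fast path above only works if equal feature dicts always serialize to the same string. A small sketch of that deterministic encoding (mirroring `make_features_string`, defined later in this file): a key-sorted array of single-key objects.

```python
import orjson

def features_string(features: dict) -> str:
    # Sorted array of single-key objects, matching make_features_string.
    return orjson.dumps([{k: features[k]} for k in sorted(features)]).decode()

a = features_string({"year": 2030, "scenario": "high"})
b = features_string({"scenario": "high", "year": 2030})
assert a == b == '[{"scenario":"high"},{"year":2030}]'
```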
-
-
-def _make_user_attribute_dict(user_attributes: dict[str, Any]) -> dict[str, Any]:
-    return {k: user_attributes[k] for k in sorted(user_attributes)}
-
-
-def _compute_user_attribute_hash(user_attributes: dict[str, Any]) -> str | None:
-    if not user_attributes:
-        return None
-
-    attrs = _make_user_attribute_dict(user_attributes)
-    return _compute_hash(bytes(json.dumps(attrs), encoding="utf-8"))
-
-
-def _compute_hash(text: bytes) -> str:
-    hash_obj = hashlib.sha256()
-    hash_obj.update(text)
-    return hash_obj.hexdigest()
-
-
-def _deserialize_time_series_metadata(text: str) -> TimeSeriesMetadata:
-    data = json.loads(text)
-    type_metadata = SerializedTypeMetadata(**data.pop(TYPE_METADATA))
-    metadata = deserialize_value(data, type_metadata.fields)
-    return metadata
+def _make_features_filter(features: dict[str, Any], params: list[str]) -> str:
+    conditions = []
+    for key, value in features.items():
+        conditions.append("features LIKE ?")
+        if isinstance(value, str):
+            params.append(f'%"{key}":"{value}"%')
+        elif isinstance(value, bool):
+            params.append(f'%"{key}":{str(value).lower()}%')
+        else:
+            params.append(f'%"{key}":{value}%')
+    return " AND ".join(conditions)
+
+
+def _make_features_dict(features: dict[str, Any]) -> dict[str, Any]:
+    return {k: features[k] for k in sorted(features)}
+
+
+def _deserialize_time_series_metadata(data: dict) -> TimeSeriesMetadata:
+    time_series_type = data.pop("time_series_type")
+    # NOTE: This is only relevant for compatibility with IS.jl and can be
+    # removed in the future when we have tighter integration.
+    if time_series_type == "DeterministicSingleTimeSeries":
+        time_series_type = "Deterministic"
+
+    serialized_type = SerializedTypeMetadata.validate_python(
+        {
+            "module": "infrasys",
+            "type": time_series_type,
+            "serialized_type": "base",
+        }
+    )
+    metadata = deserialize_type(serialized_type).get_time_series_metadata_type()
+
+    # Deserialize JSON columns
+    for column in ["features", "scaling_factor_multiplier", "units"]:
+        if data.get(column):
+            data[column] = json.loads(data[column])
+
+    # The features column requires special handling since it is stored as a
+    # sorted array of single-key objects; merge them back into one mapping.
+    if data.get("features"):
+        data["features"] = {k: v for item in data["features"] for k, v in item.items()}
+    else:
+        data["features"] = {}
+
+    data["uuid"] = data.pop("metadata_uuid")
+    data["type"] = time_series_type
+    metadata_instance = metadata.model_validate(
+        {key: value for key, value in data.items() if key in metadata.model_fields}
+    )
+    return metadata_instance
+
+
+def make_features_string(features: dict[str, Any]) -> str:
+    """Serialize a dictionary of features into a sorted JSON string."""
+    data = [{key: value} for key, value in sorted(features.items())]
+    return orjson.dumps(data).decode()
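
When callers pass only a subset of a row's features, the LIKE-based fallback in `_make_features_filter` matches the JSON fragment for each requested feature inside the serialized column. A hedged sketch of why those patterns work:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE t(features TEXT)")
con.execute("""INSERT INTO t VALUES('[{"scenario":"high"},{"year":2030}]')""")
# One LIKE condition per requested feature, matching the serialized fragment.
rows = con.execute(
    "SELECT features FROM t WHERE features LIKE ?", ('%"scenario":"high"%',)
).fetchall()
assert rows  # matches even though the row also carries a "year" feature
```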
diff --git a/src/infrasys/time_series_models.py b/src/infrasys/time_series_models.py
index 39dc571..3fb63cf 100644
--- a/src/infrasys/time_series_models.py
+++ b/src/infrasys/time_series_models.py
@@ -9,10 +9,10 @@
     Any,
     Literal,
     Optional,
+    Sequence,
     Type,
     TypeAlias,
     Union,
-    Sequence,
 )
 from uuid import UUID

@@ -23,9 +23,9 @@
 from pydantic import (
     Field,
     WithJsonSchema,
+    computed_field,
     field_serializer,
     field_validator,
-    computed_field,
     model_validator,
 )
 from typing_extensions import Annotated
@@ -33,10 +33,9 @@
 from infrasys.exceptions import (
     ISConflictingArguments,
 )
-from infrasys.models import InfraSysBaseModelWithIdentifers, InfraSysBaseModel
+from infrasys.models import InfraSysBaseModel, InfraSysBaseModelWithIdentifers
 from infrasys.normalization import NormalizationModel

-
 TIME_COLUMN = "timestamp"
 VALUE_COLUMN = "value"

@@ -57,17 +56,17 @@ class TimeSeriesStorageType(StrEnum):
 class TimeSeriesData(InfraSysBaseModelWithIdentifers, abc.ABC):
     """Base class for all time series models"""

-    variable_name: str
+    name: str
     normalization: NormalizationModel = None

     @property
     def summary(self) -> str:
-        """Return the variable_name of the time series array with its type."""
-        return f"{self.__class__.__name__}.{self.variable_name}"
+        """Return the name of the time series array with its type."""
+        return f"{self.__class__.__name__}.{self.name}"

     @staticmethod
     @abc.abstractmethod
-    def get_time_series_metadata_type() -> Type:
+    def get_time_series_metadata_type() -> Type["TimeSeriesMetadata"]:
         """Return the metadata type associated with this time series type."""


@@ -76,9 +75,10 @@ class SingleTimeSeries(TimeSeriesData):

     data: NDArray | pint.Quantity
     resolution: timedelta
-    initial_time: datetime
+    initial_timestamp: datetime

-    @computed_field
+    @computed_field  # type: ignore
+    @property
     def length(self) -> int:
         """Return the length of the data."""
         return len(self.data)
@@ -120,8 +120,8 @@ def check_data(cls, data) -> NDArray | pint.Quantity:  # Standardize what object
     def from_array(
         cls,
         data: ISArray,
-        variable_name: str,
-        initial_time: datetime,
+        name: str,
+        initial_timestamp: datetime,
         resolution: timedelta,
         normalization: NormalizationModel = None,
     ) -> "SingleTimeSeries":
@@ -135,7 +135,7 @@ def from_array(
             Start time for the time series (e.g., datetime(2020,1,1))
         resolution
             Resolution of the time series (e.g., 30min, 1hr)
-        variable_name
+        name
             Name assigned to the values of the time series (e.g., active_power)

         Returns
@@ -156,8 +156,8 @@ def from_array(

         return SingleTimeSeries(
             data=data,  # type: ignore
-            variable_name=variable_name,
-            initial_time=initial_time,
+            name=name,
+            initial_timestamp=initial_timestamp,
             resolution=resolution,
             normalization=normalization,
         )
@@ -166,7 +166,7 @@ def from_array(
     def from_time_array(
         cls,
         data: ISArray,
-        variable_name: str,
+        name: str,
         time_index: Sequence[datetime],
         normalization: NormalizationModel = None,
     ) -> "SingleTimeSeries":
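
A hedged usage sketch of the renamed constructor above (`name`/`initial_timestamp` replace `variable_name`/`initial_time`); the import path is assumed from this module:

```python
from datetime import datetime, timedelta

import numpy as np

from infrasys.time_series_models import SingleTimeSeries

ts = SingleTimeSeries.from_array(
    data=np.arange(24, dtype=float),
    name="active_power",
    initial_timestamp=datetime(2020, 1, 1),
    resolution=timedelta(hours=1),
)
assert ts.length == 24  # computed from the data
```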
@@ -176,7 +176,7 @@ def from_time_array(
         ----------
         data
             Sequence that contains the values of the time series
-        variable_name
+        name
             Name assigned to the values of the time series (e.g., active_power)
         time_index
             Sequence that contains the index of the time series
@@ -195,15 +195,15 @@ def from_time_array(
         """
         # Infer initial time from the time_index.
-        initial_time = time_index[0]
+        initial_timestamp = time_index[0]

         # This does not cover multi-resolution time indexes.
         resolution = time_index[1] - time_index[0]

         return SingleTimeSeries.from_array(
             data,
-            variable_name,
-            initial_time,
+            name,
+            initial_timestamp,
             resolution,
             normalization=normalization,
         )
@@ -211,11 +211,11 @@ def from_time_array(
     def make_timestamps(self) -> NDArray:
         """Return the timestamps as a numpy array."""
         return pd.date_range(
-            start=self.initial_time, periods=len(self.data), freq=self.resolution
+            start=self.initial_timestamp, periods=len(self.data), freq=self.resolution
         ).values

     @staticmethod
-    def get_time_series_metadata_type() -> Type:
+    def get_time_series_metadata_type() -> Type["SingleTimeSeriesMetadata"]:
         return SingleTimeSeriesMetadata

     @property
@@ -229,6 +229,113 @@ class SingleTimeSeriesScalingFactor(SingleTimeSeries):
     """Defines a time array with a single dimension of floats that are 0-1 scaling factors."""


+class Forecast(TimeSeriesData):
+    """Defines the base type for forecast time series."""
+
+    ...
+
+
+class AbstractDeterministic(TimeSeriesData):
+    """Defines the abstract type for deterministic time series forecasts."""
+
+    data: NDArray | pint.Quantity
+    resolution: timedelta
+    initial_timestamp: datetime
+    horizon: timedelta
+    interval: timedelta
+    window_count: int
+
+    @staticmethod
+    def get_time_series_metadata_type() -> Type["DeterministicMetadata"]:
+        return DeterministicMetadata
+
+    @property
+    def data_array(self) -> NDArray:
+        if isinstance(self.data, pint.Quantity):
+            return self.data.magnitude
+        return self.data
+
+
+class Deterministic(AbstractDeterministic):
+    """A deterministic forecast for a particular data field in a Component.
+
+    This is a Pydantic model used to represent deterministic forecasts where the forecast
+    data is explicitly stored as a 2D array. Each row in the array represents a forecast window,
+    and each column represents a time step within the forecast horizon.
+
+    Parameters
+    ----------
+    data : NDArray | pint.Quantity
+        The normalized forecast data as a 2D array.
+    resolution : timedelta
+        The resolution of the forecast time series.
+    initial_timestamp : datetime
+        The starting timestamp for the forecast.
+    horizon : timedelta
+        The forecast horizon, indicating the duration of each forecast window.
+    interval : timedelta
+        The time interval between consecutive forecast windows.
+    window_count : int
+        The number of forecast windows.
+
+    Attributes
+    ----------
+    data_array : NDArray
+        Returns the underlying numpy array (stripping any Pint units if present).
+
+    See Also
+    --------
+    from_single_time_series : A classmethod that creates a deterministic forecast from
+        an existing SingleTimeSeries for "perfect forecast" scenarios.
+    """
+
+    @classmethod
+    def from_array(
+        cls,
+        data: ISArray,
+        name: str,
+        initial_timestamp: datetime,
+        resolution: timedelta,
+        horizon: timedelta,
+        interval: timedelta,
+        window_count: int,
+    ) -> "Deterministic":
+        """Constructor for `Deterministic` time series that creates an instance from a sequence.
+
+        Parameters
+        ----------
+        data
+            Sequence that contains the values of the time series
+        name
+            Name assigned to the values of the time series (e.g., active_power)
+        initial_timestamp
+            Start time for the time series (e.g., datetime(2020,1,1))
+        resolution
+            Resolution of the time series (e.g., 30min, 1hr)
+        horizon
+            Horizon of the time series (e.g., 30min, 1hr)
+        interval
+            Time between the start of consecutive forecast windows (e.g., 1hr)
+        window_count
+            Number of windows that the time series represents
+
+        Returns
+        -------
+        Deterministic
+        """
+
+        return Deterministic(
+            data=data,  # type: ignore
+            name=name,
+            initial_timestamp=initial_timestamp,
+            resolution=resolution,
+            horizon=horizon,
+            interval=interval,
+            window_count=window_count,
+        )
+
+
+DeterministicTimeSeriesType: TypeAlias = Deterministic
+
+
 # TODO:
 # read CSV and Parquet and convert each column to a SingleTimeSeries
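
A hedged sketch of building a Deterministic forecast per the `from_array` signature above: a 2D array whose rows are forecast windows and whose columns are steps within the horizon.

```python
from datetime import datetime, timedelta

import numpy as np

from infrasys.time_series_models import Deterministic

resolution = timedelta(hours=1)
horizon = timedelta(hours=4)   # 4 steps per window
interval = timedelta(hours=2)  # windows start 2 hours apart
window_count = 3

forecast = Deterministic.from_array(
    data=np.zeros((window_count, int(horizon / resolution))),
    name="active_power",
    initial_timestamp=datetime(2020, 1, 1),
    resolution=resolution,
    horizon=horizon,
    interval=interval,
    window_count=window_count,
)
```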
"{start_time=} is less than {self.initial_time=}" raise ISConflictingArguments(msg) - if start_time >= self.initial_time + self.length * self.resolution: + if start_time >= self.initial_timestamp + self.length * self.resolution: msg = f"{start_time=} is too large: {self=}" raise ISConflictingArguments(msg) - diff = start_time - self.initial_time + diff = start_time - self.initial_timestamp if (diff % self.resolution).total_seconds() != 0.0: msg = ( - f"{start_time=} conflicts with initial_time={self.initial_time} and " + f"{start_time=} conflicts with initial_time={self.initial_timestamp} and " f"resolution={self.resolution}" ) raise ISConflictingArguments(msg) @@ -374,8 +490,122 @@ def get_time_series_type_str() -> str: return "SingleTimeSeriesScalingFactor" +class DeterministicMetadata(TimeSeriesMetadata): + """Defines the metadata for Deterministic time series. + + This metadata can represent either: + 1. A regular Deterministic forecast with stored 2D data + 2. A DeterministicSingleTimeSeries that references a SingleTimeSeries (like Julia's approach) + + When the time_series_uuid points to a SingleTimeSeries (no separate Deterministic data file), + the data is loaded on-the-fly from that SingleTimeSeries instead of from stored Deterministic data. + This is detected by checking if the data file exists or by checking if there's a flag. + """ + + initial_timestamp: datetime + resolution: timedelta + interval: timedelta + horizon: timedelta + window_count: int + type: Literal["Deterministic"] + + @staticmethod + def get_time_series_data_type() -> Type[TimeSeriesData]: + """Return the data type associated with this metadata type.""" + return Deterministic + + @staticmethod + def get_time_series_type_str() -> str: + """Return the time series type as a string.""" + return "Deterministic" + + @classmethod + def from_data( + cls, time_series: DeterministicTimeSeriesType, **features: Any + ) -> "DeterministicMetadata": + """Construct a DeterministicMetadata from a Deterministic time series.""" + units = ( + QuantityMetadata( + module=type(time_series.data).__module__, + quantity_type=type(time_series.data), + units=str(time_series.data.units), + ) + if isinstance(time_series.data, pint.Quantity) + else None + ) + + return cls( + name=time_series.name, + initial_timestamp=time_series.initial_timestamp, + resolution=time_series.resolution, + interval=time_series.interval, + horizon=time_series.horizon, + window_count=time_series.window_count, + time_series_uuid=time_series.uuid, + features=features, + units=units, + normalization=time_series.normalization, + type="Deterministic", + ) + + def get_range( + self, start_time: datetime | None = None, length: int | None = None + ) -> tuple[int, int]: + """Return the range to be used to index into the dataframe.""" + horizon_steps = int(self.horizon / self.resolution) + interval_steps = int(self.interval / self.resolution) + total_steps = interval_steps * (self.window_count - 1) + horizon_steps + + if start_time is None and length is None: + return (0, total_steps) + + if start_time is None: + index = 0 + else: + if start_time < self.initial_timestamp: + msg = f"{start_time=} is less than {self.initial_timestamp=}" + raise ISConflictingArguments(msg) + + last_valid_time = ( + self.initial_timestamp + (self.window_count - 1) * self.interval + self.horizon + ) + if start_time > last_valid_time: + msg = f"{start_time=} is too large: {self=}" + raise ISConflictingArguments(msg) + + diff = start_time - self.initial_timestamp + if (diff % 
self.resolution).total_seconds() != 0.0: + msg = ( + f"{start_time=} conflicts with initial_timestamp={self.initial_timestamp} and " + f"resolution={self.resolution}" + ) + raise ISConflictingArguments(msg) + + index = int(diff / self.resolution) + + if length is None: + length = total_steps - index + + if index + length > total_steps: + msg = f"{start_time=} {length=} conflicts with {self=}" + raise ISConflictingArguments(msg) + + return (index, length) + + @property + def length(self) -> int: + """Return the total length of the deterministic time series.""" + horizon_steps = int(self.horizon / self.resolution) + interval_steps = int(self.interval / self.resolution) + return interval_steps * (self.window_count - 1) + horizon_steps + + TimeSeriesMetadataUnion = Annotated[ - Union[SingleTimeSeriesMetadata, SingleTimeSeriesScalingFactorMetadata], + Union[ + SingleTimeSeriesMetadata, + SingleTimeSeriesScalingFactorMetadata, + DeterministicMetadata, + ], Field(discriminator="type"), ] @@ -455,7 +685,7 @@ def from_array( cls, data: ISArray, timestamps: Sequence[datetime] | NDArray, - variable_name: str, + name: str, normalization: NormalizationModel = None, ) -> "NonSequentialTimeSeries": """Method of NonSequentialTimeSeries that creates an instance from an array and timestamps. @@ -466,7 +696,7 @@ def from_array( Sequence that contains the values of the time series timestamps Sequence that contains the non-sequential timestamps - variable_name + name Name assigned to the values of the time series (e.g., active_power) normalization Normalization model to normalize the data @@ -476,18 +706,18 @@ def from_array( NonSequentialTimeSeries """ if normalization is not None: - npa = data if isinstance(data, np.ndarray) else np.array(data) + npa = data if isinstance(data, np.ndarray) else np.asarray(data) data = normalization.normalize_array(npa) return NonSequentialTimeSeries( data=data, # type: ignore timestamps=timestamps, # type: ignore - variable_name=variable_name, + name=name, normalization=normalization, ) @staticmethod - def get_time_series_metadata_type() -> Type: + def get_time_series_metadata_type() -> Type["NonSequentialTimeSeriesMetadata"]: "Get the metadata type of the NonSequentialTimeSeries" return NonSequentialTimeSeriesMetadata @@ -512,10 +742,10 @@ class NonSequentialTimeSeriesMetadataBase(TimeSeriesMetadata, abc.ABC): @classmethod def from_data( - cls, time_series: NonSequentialTimeSeries, **user_attributes + cls, time_series: NonSequentialTimeSeries, **features ) -> "NonSequentialTimeSeriesMetadataBase": """Construct a NonSequentialTimeSeriesMetadata from a NonSequentialTimeSeries.""" - quantity_metadata = ( + units = ( QuantityMetadata( module=type(time_series.data).__module__, quantity_type=type(time_series.data), @@ -525,11 +755,11 @@ def from_data( else None ) return cls( - variable_name=time_series.variable_name, + name=time_series.name, length=time_series.length, # type: ignore time_series_uuid=time_series.uuid, - user_attributes=user_attributes, - quantity_metadata=quantity_metadata, + features=features, + units=units, normalization=time_series.normalization, type=cls.get_time_series_type_str(), # type: ignore ) @@ -552,16 +782,16 @@ def get_time_series_type_str() -> str: class TimeSeriesKey(InfraSysBaseModel): """Base class for time series keys.""" - variable_name: str + name: str time_series_type: Type[TimeSeriesData] - user_attributes: dict[str, Any] = {} + features: dict[str, Any] = {} class SingleTimeSeriesKey(TimeSeriesKey): """Keys for SingleTimeSeries.""" length: int 
- initial_time: datetime + initial_timestamp: datetime resolution: timedelta @@ -571,8 +801,18 @@ class NonSequentialTimeSeriesKey(TimeSeriesKey): length: int -class DatabaseConnection(InfraSysBaseModel): +class DeterministicTimeSeriesKey(TimeSeriesKey): + """Keys for Deterministic time series.""" + + initial_timestamp: datetime + resolution: timedelta + interval: timedelta + horizon: timedelta + window_count: int + + +class TimeSeriesStorageContext(InfraSysBaseModel): """Stores connections to the metadata and data databases during transactions.""" metadata_conn: sqlite3.Connection - data_conn: Any = None + data_context: Any = None diff --git a/src/infrasys/time_series_storage_base.py b/src/infrasys/time_series_storage_base.py index 6a7af29..ac2c892 100644 --- a/src/infrasys/time_series_storage_base.py +++ b/src/infrasys/time_series_storage_base.py @@ -4,10 +4,9 @@ from contextlib import contextmanager from datetime import datetime from pathlib import Path -from typing import Any, Optional +from typing import Any, Generator, Literal, Optional from infrasys.time_series_models import TimeSeriesData, TimeSeriesMetadata -from typing import Generator class TimeSeriesStorageBase(abc.ABC): @@ -18,7 +17,7 @@ def add_time_series( self, metadata: TimeSeriesMetadata, time_series: TimeSeriesData, - connection: Any = None, + context: Any = None, ) -> None: """Store a time series array.""" @@ -34,12 +33,12 @@ def get_time_series( metadata: TimeSeriesMetadata, start_time: datetime | None = None, length: int | None = None, - connection: Any = None, + context: Any = None, ) -> TimeSeriesData: """Return a time series array.""" @abc.abstractmethod - def remove_time_series(self, metadata: TimeSeriesMetadata, connection: Any = None) -> None: + def remove_time_series(self, metadata: TimeSeriesMetadata, context: Any = None) -> None: """Remove a time series array.""" @abc.abstractmethod @@ -48,7 +47,41 @@ def serialize( ) -> None: """Serialize all time series to the destination directory.""" + @classmethod + @abc.abstractmethod + def deserialize( + cls, + data: dict[str, Any], + time_series_dir: Path, + dst_time_series_directory: Path | None, + read_only: bool, + **kwargs: Any, + ) -> tuple["TimeSeriesStorageBase", Optional[Any]]: + """Deserialize time series storage from serialized data. 
+ + Parameters + ---------- + data : dict[str, Any] + Serialized storage data + time_series_dir : Path + Directory containing the serialized time series files + dst_time_series_directory : Path | None + Destination directory for time series files (None for temp directory) + read_only : bool + Whether to open in read-only mode + **kwargs : Any + Additional storage-specific parameters + + Returns + ------- + tuple[TimeSeriesStorageBase, Optional[Any]] + A tuple of (storage instance, optional metadata store) + The metadata store is only used by HDF5 storage backend + """ + @contextmanager - def open_time_series_store(self) -> Generator[Any, None, None]: + def open_time_series_store( + self, mode: Literal["r", "r+", "a", "w", "w-"] = "a" + ) -> Generator[Any, None, None]: """Open a connection to the time series store.""" yield None diff --git a/src/infrasys/utils/h5_utils.py b/src/infrasys/utils/h5_utils.py new file mode 100644 index 0000000..1159366 --- /dev/null +++ b/src/infrasys/utils/h5_utils.py @@ -0,0 +1,105 @@ +"""Utility functions for working with HDF5 files.""" + +from pathlib import Path +from typing import Literal, TypeAlias + +import h5py + +H5FileMode: TypeAlias = Literal["r", "r+", "a", "w", "w-"] + + +def copy_h5_group(src_group: h5py.Group, dst_group: h5py.Group) -> None: + """Recursively copy HDF5 group contents using h5py public API. + + This function copies datasets and subgroups from a source HDF5 group to a + destination HDF5 group, preserving the hierarchical structure and attributes. + + Parameters + ---------- + src_group : h5py.Group + Source HDF5 group to copy from + dst_group : h5py.Group + Destination HDF5 group to copy to + + Notes + ----- + - Datasets are copied with their data, dtype, and chunk settings + - Subgroups are recursively copied + - All attributes from both datasets and groups are preserved + """ + for key in src_group.keys(): + src_item = src_group[key] + if isinstance(src_item, h5py.Dataset): + # Copy dataset with only the essential properties + dst_dataset = dst_group.create_dataset( + key, + data=src_item[()], + dtype=src_item.dtype, + chunks=src_item.chunks, + ) + # Copy attributes + for attr_key, attr_val in src_item.attrs.items(): + dst_dataset.attrs[attr_key] = attr_val + elif isinstance(src_item, h5py.Group): + # Recursively copy group + dst_subgroup = dst_group.create_group(key) + copy_h5_group(src_item, dst_subgroup) + # Copy group attributes + for attr_key, attr_val in src_item.attrs.items(): + dst_subgroup.attrs[attr_key] = attr_val + + +def extract_h5_dataset_to_bytes(group: h5py.Group | h5py.File, dataset_path: str) -> bytes: + """Extract HDF5 dataset contents as bytes. + + Parameters + ---------- + group : h5py.Group | h5py.File + HDF5 group or file containing the dataset + dataset_path : str + Path to the dataset within the group + + Returns + ------- + bytes + Dataset contents as bytes + + Raises + ------ + TypeError + If the item at dataset_path is not a Dataset + + Notes + ----- + This function is useful for extracting binary data like serialized databases + from HDF5 files. + """ + item = group[dataset_path] + if isinstance(item, h5py.Dataset): + return bytes(item[:]) + + msg = f"Expected Dataset at {dataset_path!r}, got {type(item).__name__}" + raise TypeError(msg) + + +def open_h5_file(file_path: Path | str, mode: str = "a") -> h5py.File: + """Open an HDF5 file with string path conversion. 
+ + Parameters + ---------- + file_path : Path | str + Path to the HDF5 file + mode : str, optional + File mode ('r', 'r+', 'a', 'w', 'w-'), by default 'a' + + Returns + ------- + h5py.File + Opened HDF5 file handle + + Notes + ----- + - Accepts both Path and str objects + - The file handle should be used with a context manager + """ + return h5py.File(str(file_path), mode=mode) # type: ignore[arg-type] diff --git a/src/infrasys/utils/metadata_utils.py b/src/infrasys/utils/metadata_utils.py new file mode 100644 index 0000000..ed2b133 --- /dev/null +++ b/src/infrasys/utils/metadata_utils.py @@ -0,0 +1,337 @@ +import sqlite3 +from functools import singledispatch + +from loguru import logger + +from infrasys import ( + COMPONENT_ASSOCIATIONS_TABLE, + KEY_VALUE_STORE_TABLE, + SUPPLEMENTAL_ATTRIBUTE_ASSOCIATIONS_TABLE, + TIME_SERIES_ASSOCIATIONS_TABLE, + TS_METADATA_FORMAT_VERSION, +) +from infrasys.time_series_models import ( + DeterministicMetadata, + SingleTimeSeriesMetadataBase, + TimeSeriesMetadata, +) +from infrasys.utils.sqlite import execute +from infrasys.utils.time_utils import to_iso_8601 + + +def create_supplemental_attribute_associations_table( + connection: sqlite3.Connection, + table_name: str = SUPPLEMENTAL_ATTRIBUTE_ASSOCIATIONS_TABLE, + with_index: bool = True, +) -> bool: + """ + Create the supplemental attribute associations table schema. + + Parameters + ---------- + connection : sqlite3.Connection + SQLite connection to the metadata store database. + table_name : str, optional + Name of the table to create, by default ``supplemental_attribute_associations``. + with_index : bool, default True + Whether to create associated lookup indexes. + + Returns + ------- + bool + True if the table exists or was created successfully. + """ + schema = [ + "id INTEGER PRIMARY KEY", + "attribute_uuid TEXT", + "attribute_type TEXT", + "component_uuid TEXT", + "component_type TEXT", + ] + schema_text = ",".join(schema) + cur = connection.cursor() + execute(cur, f"CREATE TABLE IF NOT EXISTS {table_name}({schema_text})") + logger.debug("Created supplemental attribute associations table {}", table_name) + if with_index: + create_supplemental_attribute_association_indexes(connection, table_name) + result = connection.execute( + "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", (table_name,) + ).fetchone() + connection.commit() + return bool(result) + + +def create_supplemental_attribute_association_indexes( + connection: sqlite3.Connection, + table_name: str = "supplemental_attribute_associations", +) -> None: + """Create lookup indexes for the supplemental attribute associations table.""" + cur = connection.cursor() + execute( + cur, + f"CREATE INDEX IF NOT EXISTS {table_name}_by_attribute " + f"ON {table_name} (attribute_uuid, component_uuid, component_type)", + ) + execute( + cur, + f"CREATE INDEX IF NOT EXISTS {table_name}_by_component " + f"ON {table_name} (component_uuid, attribute_uuid, attribute_type)", + ) + connection.commit() + + +def create_component_associations_table( + connection: sqlite3.Connection, + table_name: str = COMPONENT_ASSOCIATIONS_TABLE, + with_index: bool = True, +) -> bool: + """ + Create the component associations table schema. + + Parameters + ---------- + connection : sqlite3.Connection + SQLite connection to the metadata store database. + table_name : str, optional + Name of the table to create, by default ``COMPONENT_ASSOCIATIONS_TABLE``. + with_index : bool, default True + Whether to create lookup indexes for the table. 
+
+    Returns
+    -------
+    bool
+        True if the table exists or was created successfully.
+    """
+    schema = [
+        "id INTEGER PRIMARY KEY",
+        "component_uuid TEXT",
+        "component_type TEXT",
+        "attached_component_uuid TEXT",
+        "attached_component_type TEXT",
+    ]
+    schema_text = ",".join(schema)
+    cur = connection.cursor()
+    execute(cur, f"CREATE TABLE IF NOT EXISTS {table_name}({schema_text})")
+    logger.debug("Created component associations table {}", table_name)
+    if with_index:
+        create_component_association_indexes(connection, table_name)
+    result = connection.execute(
+        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", (table_name,)
+    ).fetchone()
+    connection.commit()
+    return bool(result)
+
+
+def create_component_association_indexes(
+    connection: sqlite3.Connection,
+    table_name: str = COMPONENT_ASSOCIATIONS_TABLE,
+) -> None:
+    """Create lookup indexes for the component associations table."""
+    cur = connection.cursor()
+    execute(
+        cur,
+        f"CREATE INDEX IF NOT EXISTS {table_name}_by_component ON {table_name} (component_uuid)",
+    )
+    execute(
+        cur,
+        f"CREATE INDEX IF NOT EXISTS {table_name}_by_attached_component "
+        f"ON {table_name} (attached_component_uuid)",
+    )
+    connection.commit()
+    return
+
+
+def create_associations_table(
+    connection: sqlite3.Connection,
+    table_name=TIME_SERIES_ASSOCIATIONS_TABLE,
+    with_index: bool = True,
+) -> bool:
+    """
+    Create the time series associations table schema on a DB connection.
+
+    Parameters
+    ----------
+    connection : sqlite3.Connection
+        SQLite connection to the metadata store database.
+    table_name : str, optional
+        Name of the table to create, by default ``TIME_SERIES_ASSOCIATIONS_TABLE``.
+    with_index : bool, default True
+        Whether to create the supporting indexes for the associations table.
+
+    Returns
+    -------
+    bool
+        True if the table was created successfully.
+    """
+    schema = [
+        "id INTEGER PRIMARY KEY",
+        "time_series_uuid TEXT NOT NULL",
+        "time_series_type TEXT NOT NULL",
+        "initial_timestamp TEXT",
+        "resolution TEXT NULL",
+        "horizon TEXT",
+        "interval TEXT",
+        "window_count INTEGER",
+        "length INTEGER",
+        "name TEXT NOT NULL",
+        "owner_uuid TEXT NOT NULL",
+        "owner_type TEXT NOT NULL",
+        "owner_category TEXT NOT NULL",
+        "features TEXT NOT NULL",
+        "scaling_factor_multiplier TEXT NULL",
+        "metadata_uuid TEXT NOT NULL",
+        "units TEXT NULL",
+    ]
+    schema_text = ",".join(schema)
+    cur = connection.cursor()
+    execute(cur, f"CREATE TABLE {table_name}({schema_text})")
+    logger.debug("Created time series associations table")
+    if with_index:
+        create_indexes(connection, table_name)
+
+    # Return True if the table creation was successful.
+    result = connection.execute(
+        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", (table_name,)
+    ).fetchone()
+
+    if not result:
+        msg = "Could not create the associations table."
+        raise RuntimeError(msg)
+
+    connection.commit()
+    return bool(result)
+
+
+def create_key_value_store(
+    connection: sqlite3.Connection, table_name=KEY_VALUE_STORE_TABLE
+) -> None:
+    """
+    Ensure the metadata key/value store exists with the current format version.
+
+    Parameters
+    ----------
+    connection : sqlite3.Connection
+        SQLite connection to the metadata store database.
+    table_name : str, optional
+        Name of the table to create, by default ``KEY_VALUE_STORE_TABLE``.
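+
+    Examples
+    --------
+    A minimal sketch, assuming an in-memory database:
+
+    >>> import sqlite3
+    >>> con = sqlite3.connect(":memory:")
+    >>> create_key_value_store(con)  # doctest: +SKIP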
+ """ + schema = ["key TEXT PRIMARY KEY", "value JSON NOT NULL"] + schema_text = ",".join(schema) + cur = connection.cursor() + execute(cur, f"CREATE TABLE IF NOT EXISTS {table_name}({schema_text})") + + rows = [("version", TS_METADATA_FORMAT_VERSION)] + placeholder = ",".join(["?"] * len(rows[0])) + query = f"INSERT OR REPLACE INTO {table_name}(key, value) VALUES({placeholder})" + cur.executemany(query, rows) + connection.commit() + logger.debug("Created metadata table") + return + + +def create_indexes( + connection: sqlite3.Connection, table_name=TIME_SERIES_ASSOCIATIONS_TABLE +) -> None: + # Index strategy: + # 1. Optimize for these user queries with indexes: + # 1a. all time series attached to one component + # 1b. time series for one component + variable_name + type + # 1c. time series for one component with all user attributes + # 2. Optimize for checks at system.add_time_series. Use all fields. + # 3. Optimize for returning all metadata for a time series UUID. + logger.debug("Creating indexes on {}.", table_name) + cur = connection.cursor() + execute( + cur, + f"CREATE UNIQUE INDEX IF NOT EXISTS by_c_vn_tst_hash ON {table_name} " + f"(owner_uuid, time_series_type, name, resolution, features)", + ) + execute( + cur, + f"CREATE INDEX IF NOT EXISTS by_ts_uuid ON {table_name} (time_series_uuid)", + ) + return + + +@singledispatch +def get_resolution(metadata: TimeSeriesMetadata) -> str | None: + """Get formatted resolution from metadata or None if not available.""" + return None + + +@get_resolution.register +def _(metadata: SingleTimeSeriesMetadataBase) -> str: + """Get resolution from SingleTimeSeriesMetadataBase.""" + return to_iso_8601(metadata.resolution) + + +@get_resolution.register +def _(metadata: DeterministicMetadata) -> str: + """Get resolution from DeterministicMetadata.""" + return to_iso_8601(metadata.resolution) + + +@singledispatch +def get_initial_timestamp(metadata: TimeSeriesMetadata) -> str | None: + """Get formatted initial_timestamp from metadata or None if not available.""" + return None + + +@get_initial_timestamp.register +def _(metadata: SingleTimeSeriesMetadataBase) -> str: + """Get initial_timestamp from SingleTimeSeriesMetadataBase. Format for initial_timestamp is YYYY-MM-DDThh:mm:ss.""" + return metadata.initial_timestamp.isoformat(sep="T") + + +@get_initial_timestamp.register +def _(metadata: DeterministicMetadata) -> str: + """Get initial_timestamp from DeterministicMetadata. 
Format for initial_timestamp is YYYY-MM-DDThh:mm:ss""" + return metadata.initial_timestamp.isoformat(sep="T") + + +@singledispatch +def get_horizon(metadata: TimeSeriesMetadata) -> str | None: + """Get formatted horizon from metadata or None if not available.""" + return None + + +@get_horizon.register +def _(metadata: DeterministicMetadata) -> str: + """Get horizon from DeterministicMetadata.""" + return to_iso_8601(metadata.horizon) + + +@singledispatch +def get_interval(metadata: TimeSeriesMetadata) -> str | None: + """Get formatted interval from metadata or None if not available.""" + return None + + +@get_interval.register +def _(metadata: DeterministicMetadata) -> str: + """Get interval from DeterministicMetadata.""" + return to_iso_8601(metadata.interval) + + +@singledispatch +def get_window_count(metadata: TimeSeriesMetadata) -> int | None: + """Get window_count from metadata or None if not available.""" + return None + + +@get_window_count.register +def _(metadata: DeterministicMetadata) -> int: + """Get window_count from DeterministicMetadata.""" + return metadata.window_count + + +@singledispatch +def get_length(metadata: TimeSeriesMetadata) -> int | None: + """Get length from metadata or None if not available.""" + return None + + +@get_length.register +def _(metadata: SingleTimeSeriesMetadataBase) -> int: + """Get length from SingleTimeSeriesMetadataBase.""" + return metadata.length diff --git a/src/infrasys/utils/path_utils.py b/src/infrasys/utils/path_utils.py index e2f4db7..9c17fc6 100644 --- a/src/infrasys/utils/path_utils.py +++ b/src/infrasys/utils/path_utils.py @@ -1,6 +1,8 @@ import shutil from pathlib import Path +from loguru import logger + def delete_if_exists(path: Path) -> bool: """Delete a file or directory if it exists. @@ -22,3 +24,8 @@ def delete_if_exists(path: Path) -> bool: path.unlink() return True return False + + +def clean_tmp_folder(folder: Path | str) -> None: + shutil.rmtree(folder) + logger.info("Wiped time series folder: {}", folder) diff --git a/src/infrasys/utils/sqlite.py b/src/infrasys/utils/sqlite.py index eeb9099..ee01a41 100644 --- a/src/infrasys/utils/sqlite.py +++ b/src/infrasys/utils/sqlite.py @@ -2,11 +2,35 @@ import sqlite3 from pathlib import Path -from typing import Any, Sequence +from typing import Any, Literal, Sequence from loguru import logger +class ManagedConnection(sqlite3.Connection): + """SQLite connection that auto-closes on garbage collection.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self._closed = False + + def close(self) -> None: + if self._closed: + return + self._closed = True + super().close() + + def __enter__(self) -> "ManagedConnection": + return self + + def __exit__(self, exc_type, exc, tb) -> Literal[False]: + super().__exit__(exc_type, exc, tb) + return False + + def __del__(self) -> None: + self.close() + + def backup(src_con: sqlite3.Connection, filename: Path | str) -> None: """Backup a database to a file.""" with sqlite3.connect(filename) as dst_con: @@ -23,12 +47,25 @@ def restore(dst_con: sqlite3.Connection, filename: Path | str) -> None: logger.info("Restored the database from {}.", filename) -def create_in_memory_db(database: str = ":memory:") -> sqlite3.Connection: +def create_in_memory_db(database: str = ":memory:") -> ManagedConnection: """Create an in-memory database.""" - return sqlite3.connect(database) + return sqlite3.connect(database, factory=ManagedConnection) + + +def has_table(con: sqlite3.Connection, table: str) -> bool: + """Return 
True if the table exists in the SQLite connection."""
+    try:
+        cur = con.cursor()
+        res = cur.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
+            (table,),
+        ).fetchone()
+        return res is not None
+    except sqlite3.Error:
+        return False
 
 
 def execute(cursor: sqlite3.Cursor, query: str, params: Sequence[Any] = ()) -> Any:
     """Execute a SQL query."""
-    logger.trace("SQL query: {query} {params=}", query)
+    logger.trace("SQL query: {} {}", query, params)
     return cursor.execute(query, params)
diff --git a/src/infrasys/utils/time_utils.py b/src/infrasys/utils/time_utils.py
index b8e1aa3..805ad9c 100644
--- a/src/infrasys/utils/time_utils.py
+++ b/src/infrasys/utils/time_utils.py
@@ -173,3 +173,19 @@ def to_iso_8601(duration: timedelta | relativedelta) -> str:
         msg += f"{total_seconds=} must be divisible by 1ms"
         raise ValueError(msg)
     return f"P0DT{total_seconds:.3f}S"
+
+
+def str_timedelta_to_iso_8601(delta_str: str) -> str:
+    """Convert a str(timedelta) to ISO 8601 string."""
+    pattern = r"(?:(?P<days>\d+) days?, )?(?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d+)"
+    match = re.fullmatch(pattern, delta_str)
+    if not match:
+        msg = f"Invalid timedelta format: {delta_str=}"
+        raise ValueError(msg)
+    days = int(match.group("days") or 0)
+    hours = int(match.group("hours"))
+    minutes = int(match.group("minutes"))
+    seconds = int(match.group("seconds"))
+    delta = timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
+
+    return to_iso_8601(delta)
diff --git a/tests/conftest.py b/tests/conftest.py
index 3e630f4..f5b9fd6 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,12 +1,13 @@
-import logging
 from datetime import datetime, timedelta
 
 import pytest
 from loguru import logger
 
 from infrasys.location import Location
-from infrasys.time_series_models import SingleTimeSeries, NonSequentialTimeSeries
-from .models.simple_system import SimpleSystem, SimpleBus, SimpleGenerator, SimpleSubsystem
+from infrasys.quantities import Energy
+from infrasys.time_series_models import NonSequentialTimeSeries, SingleTimeSeries
+
+from .models.simple_system import SimpleBus, SimpleGenerator, SimpleSubsystem, SimpleSystem
 
 
 @pytest.fixture
@@ -44,24 +45,40 @@ def simple_system_with_nonsequential_time_series(simple_system) -> SimpleSystem:
     timestamps = [
         datetime(year=2030, month=1, day=1) + timedelta(seconds=5 * i) for i in range(length)
     ]
-    ts = NonSequentialTimeSeries.from_array(
-        data=df, variable_name=variable_name, timestamps=timestamps
-    )
+    ts = NonSequentialTimeSeries.from_array(data=df, name=variable_name, timestamps=timestamps)
     gen = simple_system.get_component(SimpleGenerator, "test-gen")
     simple_system.add_time_series(ts, gen)
     return simple_system
 
 
-@pytest.fixture(autouse=True)
-def propagate_logs():
-    """Enable logging for the package"""
+@pytest.fixture
+def simple_system_with_supplemental_attributes(simple_system) -> SimpleSystem:
+    """Creates a system with supplemental attributes."""
+    from infrasys.location import GeographicInfo
+
+    from .test_supplemental_attributes import Attribute
+
+    bus = simple_system.get_component(SimpleBus, "test-bus")
+    gen = simple_system.get_component(SimpleGenerator, "test-gen")
+
+    attr1 = GeographicInfo.example()
+    attr2 = GeographicInfo.example()
+    attr2.geo_json["geometry"]["coordinates"] = [1.0, 2.0]
 
-    class PropagateHandler(logging.Handler):
-        def emit(self, record):
-            if logging.getLogger(record.name).isEnabledFor(record.levelno):
-                logging.getLogger(record.name).handle(record)
+    attr3 = Attribute(energy=Energy(10.0, "kWh"))
+    
simple_system.add_supplemental_attribute(bus, attr1) + simple_system.add_supplemental_attribute(bus, attr2) + simple_system.add_supplemental_attribute(gen, attr3) + + return simple_system + + +@pytest.fixture +def caplog(caplog): + """Enable logging for the package""" logger.remove() logger.enable("infrasys") - logger.add(PropagateHandler(), format="{message}") - yield + handler_id = logger.add(caplog.handler) + yield caplog + logger.remove(handler_id) diff --git a/tests/test_arrow_storage.py b/tests/test_arrow_storage.py index c414567..9db9e8b 100644 --- a/tests/test_arrow_storage.py +++ b/tests/test_arrow_storage.py @@ -1,22 +1,22 @@ """Test related to the pyarrow storage manager.""" -import pytest from datetime import datetime, timedelta from pathlib import Path import numpy as np +import pytest from loguru import logger from infrasys.arrow_storage import ArrowTimeSeriesStorage from infrasys.in_memory_time_series_storage import InMemoryTimeSeriesStorage from infrasys.system import System from infrasys.time_series_models import ( - SingleTimeSeries, NonSequentialTimeSeries, + SingleTimeSeries, TimeSeriesStorageType, ) -from .models.simple_system import SimpleSystem, SimpleBus, SimpleGenerator +from .models.simple_system import SimpleBus, SimpleGenerator, SimpleSystem @pytest.fixture(scope="session") @@ -32,8 +32,8 @@ def test_file_creation_with_single_time_series(test_system: System): gen1 = test_system.get_component(SimpleGenerator, "gen1") ts = SingleTimeSeries.from_array( data=range(8784), - variable_name="active_power", - initial_time=datetime(year=2020, month=1, day=1), + name="active_power", + initial_timestamp=datetime(year=2020, month=1, day=1), resolution=timedelta(hours=1), ) test_system.time_series.add(ts, gen1, scenario="one", model_year="2030") @@ -53,7 +53,7 @@ def test_file_creation_with_nonsequential_time_series(test_system: System): ts = NonSequentialTimeSeries.from_array( data=range(10), timestamps=timestamps, - variable_name="active_power", + name="active_power", ) test_system.time_series.add(ts, gen1, scenario="one", model_year="2030") time_series = test_system.time_series.get(gen1, time_series_type=NonSequentialTimeSeries) @@ -72,8 +72,8 @@ def test_copy_files_with_single_time_series(tmp_path): system.add_components(bus, gen1) ts = SingleTimeSeries.from_array( data=range(8784), - variable_name="active_power", - initial_time=datetime(year=2020, month=1, day=1), + name="active_power", + initial_timestamp=datetime(year=2020, month=1, day=1), resolution=timedelta(hours=1), ) system.time_series.add(ts, gen1, scenario="two", model_year="2030") @@ -103,7 +103,7 @@ def test_copy_files_with_nonsequential_timeseries(tmp_path): ts = NonSequentialTimeSeries.from_array( data=range(10), timestamps=timestamps, - variable_name="active_power", + name="active_power", ) system.time_series.add(ts, gen1, scenario="two", model_year="2030") filename = tmp_path / "system.json" @@ -128,8 +128,8 @@ def test_read_deserialize_single_time_series(tmp_path): system.add_components(bus, gen1) ts = SingleTimeSeries.from_array( data=range(8784), - variable_name="active_power", - initial_time=datetime(year=2020, month=1, day=1), + name="active_power", + initial_timestamp=datetime(year=2020, month=1, day=1), resolution=timedelta(hours=1), ) system.time_series.add(ts, gen1, scenario="high", model_year="2030") @@ -141,7 +141,7 @@ def test_read_deserialize_single_time_series(tmp_path): deserialize_ts = system2.time_series.get(gen1b) assert isinstance(deserialize_ts, SingleTimeSeries) assert 
deserialize_ts.resolution == ts.resolution - assert deserialize_ts.initial_time == ts.initial_time + assert deserialize_ts.initial_timestamp == ts.initial_timestamp assert isinstance(deserialize_ts.data, np.ndarray) length = ts.length assert isinstance(length, int) @@ -160,7 +160,7 @@ def test_read_deserialize_nonsequential_time_series(tmp_path): ts = NonSequentialTimeSeries.from_array( data=range(10), timestamps=timestamps, - variable_name="active_power", + name="active_power", ) system.time_series.add(ts, gen1, scenario="high", model_year="2030") filename = tmp_path / "system.json" diff --git a/tests/test_base_quantity.py b/tests/test_base_quantity.py index d174ee7..cb81438 100644 --- a/tests/test_base_quantity.py +++ b/tests/test_base_quantity.py @@ -1,13 +1,15 @@ import os -from infrasys.system import System + +import numpy as np +import pytest +from pint import Quantity +from pint.errors import DimensionalityError from pydantic import ValidationError -from infrasys.base_quantity import ureg, BaseQuantity + +from infrasys.base_quantity import BaseQuantity, ureg from infrasys.component import Component from infrasys.quantities import ActivePower, Time, Voltage -from pint import Quantity -from pint.errors import DimensionalityError -import pytest -import numpy as np +from infrasys.system import System class BaseQuantityComponent(Component): @@ -61,8 +63,7 @@ def test_base_unit_validation(): # Check that new classes must define __base_unit__ with pytest.raises(TypeError): - class _(BaseQuantity): - ... + class _(BaseQuantity): ... test_magnitude = 100 test_unit = "volt" diff --git a/tests/test_cost_curves.py b/tests/test_cost_curves.py index 75c831c..9a1a149 100644 --- a/tests/test_cost_curves.py +++ b/tests/test_cost_curves.py @@ -1,7 +1,8 @@ +from infrasys import Component from infrasys.cost_curves import CostCurve, FuelCurve, ProductionVariableCostCurve, UnitSystem from infrasys.function_data import LinearFunctionData from infrasys.value_curves import InputOutputCurve, LinearCurve -from infrasys import Component + from .models.simple_system import SimpleSystem @@ -13,7 +14,7 @@ class NestedCostCurve(ProductionVariableCostCurve): variable: CostCurve | FuelCurve | None = None -class TestComponentWithProductionCost(Component): +class ComponentWithProductionCost(Component): cost: NestedCostCurve | None = None @@ -85,7 +86,7 @@ def test_value_curve_custom_serialization(): def test_nested_value_curve_serialization(tmp_path): system = SimpleSystem(auto_add_composed_components=True) gen_name = "thermal-gen" - gen_with_operation_cost = TestComponentWithProductionCost( + gen_with_operation_cost = ComponentWithProductionCost( name=gen_name, cost=NestedCostCurve( power_units=UnitSystem.NATURAL_UNITS, @@ -102,6 +103,6 @@ def test_nested_value_curve_serialization(tmp_path): # Test deserialization deserialized_system = SimpleSystem.from_json(filename) - gen_deserialized = deserialized_system.get_component(TestComponentWithProductionCost, gen_name) + gen_deserialized = deserialized_system.get_component(ComponentWithProductionCost, gen_name) assert gen_deserialized is not None assert gen_deserialized.cost == gen_with_operation_cost.cost diff --git a/tests/test_deterministic_time_series.py b/tests/test_deterministic_time_series.py new file mode 100644 index 0000000..4c7d569 --- /dev/null +++ b/tests/test_deterministic_time_series.py @@ -0,0 +1,242 @@ +import uuid +from datetime import datetime, timedelta +from typing import Any + +import numpy as np +import pytest + +from infrasys.exceptions import 
ISConflictingArguments +from infrasys.quantities import ActivePower +from infrasys.time_series_metadata_store import ( + TimeSeriesMetadataStore, + _deserialize_time_series_metadata, +) +from infrasys.time_series_models import ( + Deterministic, + DeterministicMetadata, + TimeSeriesStorageType, +) +from infrasys.utils.sqlite import create_in_memory_db +from tests.models.simple_system import SimpleGenerator, SimpleSystem + +TS_STORAGE_OPTIONS = ( + TimeSeriesStorageType.ARROW, + TimeSeriesStorageType.HDF5, +) + + +@pytest.mark.parametrize("storage_type", TS_STORAGE_OPTIONS) +def test_with_deterministic_time_series_quantity(tmp_path, storage_type): + """Test serialization of DeterministicTimeSeries with a Pint quantity and different storage types.""" + system = SimpleSystem(auto_add_composed_components=True, time_series_storage_type=storage_type) + gen = SimpleGenerator.example() + system.add_components(gen) + + initial_time = datetime(year=2020, month=9, day=1) + resolution = timedelta(hours=1) + horizon = timedelta(hours=8) + interval = timedelta(hours=1) + window_count = 3 + + forecast_data = [ + [100.0, 101.0, 101.3, 90.0, 98.0, 87.0, 88.0, 67.0], + [101.0, 101.3, 99.0, 98.0, 88.9, 88.3, 67.1, 89.4], + [99.0, 67.0, 89.0, 99.9, 100.0, 101.0, 112.0, 101.3], + ] + + data = ActivePower(np.array(forecast_data), "watts") + name = "active_power_forecast" + ts = Deterministic.from_array( + data, name, initial_time, resolution, horizon, interval, window_count + ) + system.add_time_series(ts, gen) + + sys_file = tmp_path / "system.json" + system.to_json(sys_file) + + system2 = SimpleSystem.from_json(sys_file) + gen2 = system2.get_component(SimpleGenerator, gen.name) + ts2 = system2.get_time_series(gen2, name=name) + assert isinstance(ts, Deterministic) + assert ts2.resolution == resolution + assert ts2.initial_timestamp == initial_time + + +@pytest.mark.parametrize("storage_type", TS_STORAGE_OPTIONS) +def test_with_deterministic_single_time_series_quantity(tmp_path, storage_type): + """Test serialization of Deterministic created from SingleTimeSeries with a Pint quantity and different storage types.""" + system = SimpleSystem(auto_add_composed_components=True, time_series_storage_type=storage_type) + gen = SimpleGenerator.example() + system.add_components(gen) + + initial_timestamp = datetime(year=2020, month=1, day=1) + name = "active_power" + horizon = timedelta(hours=8) + interval = timedelta(hours=1) + resolution = timedelta(hours=1) + + # Create deterministic data directly from array + # Create forecast windows manually from the time series data + horizon_steps = int(horizon / resolution) + interval_steps = int(interval / resolution) + total_steps = 8784 + window_count = int((total_steps - horizon_steps) / interval_steps) + 1 + + forecast_data = [] + for window_idx in range(window_count): + start_idx = window_idx * interval_steps + end_idx = start_idx + horizon_steps + forecast_data.append(list(range(start_idx, end_idx))) + + ts_deterministic = Deterministic.from_array( + data=np.array(forecast_data), + name=name, + resolution=resolution, + initial_timestamp=initial_timestamp, + interval=interval, + horizon=horizon, + window_count=window_count, + ) + system.add_time_series(ts_deterministic, gen) + + sys_file = tmp_path / "system.json" + system.to_json(sys_file) + + system2 = SimpleSystem.from_json(sys_file) + gen2 = system2.get_component(SimpleGenerator, gen.name) + ts2 = system2.get_time_series(gen2, name=name, time_series_type=Deterministic) + assert isinstance(ts_deterministic, 
Deterministic) + assert ts2.horizon == horizon + assert ts2.initial_timestamp == initial_timestamp + + +def test_deterministic_metadata_get_range(): + """Test the get_range method of DeterministicMetadata.""" + # Set up the deterministic time series parameters + initial_time = datetime(year=2020, month=9, day=1) + resolution = timedelta(hours=1) + horizon = timedelta(hours=8) + interval = timedelta(hours=4) + window_count = 3 + + # Create a metadata object for testing + metadata = DeterministicMetadata( + name="test_ts", + initial_timestamp=initial_time, + resolution=resolution, + interval=interval, + horizon=horizon, + window_count=window_count, + time_series_uuid=uuid.uuid4(), + type="Deterministic", + ) + + start_idx, length = metadata.get_range() + # The total length should be: interval_steps * (window_count - 1) + horizon_steps + # interval_steps = 4, window_count = 3, horizon_steps = 8 + # So total_steps = 4 * (3 - 1) + 8 = 16 + assert start_idx == 0 + assert length == 16 + + start_time = initial_time + timedelta(hours=5) + start_idx, length_val = metadata.get_range(start_time=start_time) + assert start_idx == 5 + assert length_val == 11 # 16 - 5 = 11 + + start_idx, length_val = metadata.get_range(length=10) + assert start_idx == 0 + assert length_val == 10 + + start_time = initial_time + timedelta(hours=5) + start_idx, length_val = metadata.get_range(start_time=start_time, length=5) + assert start_idx == 5 + assert length_val == 5 + + # Test 5: error cases + # Start time too early + with pytest.raises(ISConflictingArguments): + metadata.get_range(start_time=initial_time - timedelta(hours=1)) + + # Start time too late + last_valid_time = initial_time + (window_count - 1) * interval + horizon + with pytest.raises(ISConflictingArguments): + metadata.get_range(start_time=last_valid_time + timedelta(hours=1)) + + # Start time not aligned with resolution + with pytest.raises(ISConflictingArguments): + metadata.get_range(start_time=initial_time + timedelta(minutes=30)) + + # Length too large + with pytest.raises(ISConflictingArguments): + metadata.get_range(start_time=initial_time + timedelta(hours=10), length=10) + + +def test_deterministic_single_time_series_backwards_compatibility(tmp_path: Any) -> None: + """Test compatibility for DeterministicSingleTimeSeries type from IS.jl.""" + # Simulate metadata that would come from IS.jl with DeterministicSingleTimeSeries + # Note: resolution, interval, and horizon are stored as ISO 8601 strings in the DB + legacy_metadata_dict: dict[str, Any] = { + "metadata_uuid": str(uuid.uuid4()), + "time_series_uuid": str(uuid.uuid4()), + "time_series_type": "DeterministicSingleTimeSeries", + "name": "test_forecast", + "initial_timestamp": datetime(2020, 1, 1), + "resolution": "PT1H", # ISO 8601 format for 1 hour + "interval": "PT4H", # ISO 8601 format for 4 hours + "horizon": "PT8H", # ISO 8601 format for 8 hours + "window_count": 5, + "features": None, + "scaling_factor_multiplier": None, + "units": None, + } + metadata = _deserialize_time_series_metadata(legacy_metadata_dict.copy()) + + # Verify it was converted to Deterministic + assert isinstance(metadata, DeterministicMetadata) + assert metadata.type == "Deterministic" + assert metadata.name == "test_forecast" + assert metadata.initial_timestamp == datetime(2020, 1, 1) + assert metadata.resolution == timedelta(hours=1) + assert metadata.interval == timedelta(hours=4) + assert metadata.horizon == timedelta(hours=8) + assert metadata.window_count == 5 + + conn = create_in_memory_db() + metadata_store = 
TimeSeriesMetadataStore(conn, initialize=True) + cursor = conn.cursor() + owner_uuid = str(uuid.uuid4()) + + rows: list[dict[str, Any]] = [ + { + "time_series_uuid": legacy_metadata_dict["time_series_uuid"], + "time_series_type": legacy_metadata_dict["time_series_type"], # Legacy type name + "initial_timestamp": legacy_metadata_dict["initial_timestamp"].isoformat(), + "resolution": legacy_metadata_dict["resolution"], + "horizon": legacy_metadata_dict["horizon"], + "interval": legacy_metadata_dict["interval"], + "window_count": legacy_metadata_dict["window_count"], + "length": None, + "name": legacy_metadata_dict["name"], + "owner_uuid": owner_uuid, + "owner_type": "SimpleGenerator", + "owner_category": "Component", + "features": "[]", # empty features + "units": legacy_metadata_dict["units"], + "metadata_uuid": legacy_metadata_dict["metadata_uuid"], + } + ] + + metadata_store._insert_rows(rows, cursor) # type: ignore[arg-type] + conn.commit() + + metadata_store._load_metadata_into_memory() # type: ignore[misc] + + loaded_metadata = metadata_store._cache_metadata[metadata.uuid] # type: ignore[misc] + assert isinstance(loaded_metadata, DeterministicMetadata) + assert loaded_metadata.type == "Deterministic" + assert loaded_metadata.name == "test_forecast" + assert loaded_metadata.initial_timestamp == datetime(2020, 1, 1) + assert loaded_metadata.resolution == timedelta(hours=1) + assert loaded_metadata.interval == timedelta(hours=4) + assert loaded_metadata.horizon == timedelta(hours=8) + assert loaded_metadata.window_count == 5 diff --git a/tests/test_h5_storage.py b/tests/test_h5_storage.py new file mode 100644 index 0000000..069bdee --- /dev/null +++ b/tests/test_h5_storage.py @@ -0,0 +1,145 @@ +from datetime import datetime, timedelta +from pathlib import Path + +import numpy as np +import pytest + +from infrasys import System +from infrasys.exceptions import ISAlreadyAttached +from infrasys.h5_time_series_storage import HDF5TimeSeriesStorage +from infrasys.time_series_models import SingleTimeSeries, TimeSeriesStorageType +from infrasys.time_series_storage_base import TimeSeriesStorageBase +from tests.models.simple_system import SimpleBus, SimpleGenerator + + +@pytest.fixture(scope="function") +def system_with_h5_storage(tmp_path): + storage_type = TimeSeriesStorageType.HDF5 + return System( + name="TestSystem", + time_series_storage_type=storage_type, + time_series_directory=tmp_path, + auto_add_composed_components=True, + in_memory=True, + ) + + +def test_initialize_h5_storage(tmp_path): + h5_storage = HDF5TimeSeriesStorage(directory=tmp_path) + assert isinstance(h5_storage, TimeSeriesStorageBase) + + +def test_missing_module(missing_modules, tmp_path): + storage_type = TimeSeriesStorageType.HDF5 + with missing_modules("h5py"): + with pytest.raises(ImportError): + _ = System( + name="test", time_series_storage_type=storage_type, time_series_directory=tmp_path + ) + + +def test_storage_initialization(tmp_path): + storage_type = TimeSeriesStorageType.HDF5 + system = System( + name="test", time_series_storage_type=storage_type, time_series_directory=tmp_path + ) + assert isinstance(system._time_series_mgr.storage, HDF5TimeSeriesStorage) + + +def test_handler_creation(tmp_path): + storage_type = TimeSeriesStorageType.HDF5 + system = System( + name="test", + time_series_storage_type=storage_type, + time_series_directory=tmp_path, + auto_add_composed_components=True, + ) + storage = system._time_series_mgr.storage + assert isinstance(storage, HDF5TimeSeriesStorage) + + +def 
test_h5_time_series(tmp_path): + storage_type = TimeSeriesStorageType.HDF5 + system = System( + name="test", + time_series_storage_type=storage_type, + time_series_directory=tmp_path, + auto_add_composed_components=True, + ) + + # Adding some example components + bus = SimpleBus(name="test", voltage=1.1) + gen = SimpleGenerator(name="gen1", active_power=1.0, rating=1.0, bus=bus, available=True) + + system.add_component(gen) + + ts = SingleTimeSeries.from_array( + data=range(8784), + name="active_power", + initial_timestamp=datetime(year=2020, month=1, day=1), + resolution=timedelta(hours=1), + ) + system.add_time_series(ts, gen, scenario="one", model_year="2030") + time_series = system.get_time_series(gen) + assert np.array_equal(time_series.data, ts.data) + + system.remove_time_series(gen) + + assert not system.has_time_series(gen) + + +def test_h5py_serialization(tmp_path, system_with_h5_storage): + system = system_with_h5_storage + + # Adding some example components + bus = SimpleBus(name="test", voltage=1.1) + gen = SimpleGenerator(name="gen1", active_power=1.0, rating=1.0, bus=bus, available=True) + + system.add_component(gen) + + ts = SingleTimeSeries.from_array( + data=range(8784), + name="active_power", + initial_timestamp=datetime(year=2020, month=1, day=1), + resolution=timedelta(hours=1), + ) + system.add_time_series(ts, gen, scenario="one", model_year="2030") + + # Serialize + fpath = tmp_path / Path("test.json") + system.to_json(fpath) + fname = system._time_series_mgr.storage.STORAGE_FILE + output_time_series_file = tmp_path / f"{fpath.stem}_time_series" / fname + assert (output_time_series_file).exists() + + # Deserialize + system_deserialized = System.from_json(fpath) + storage_deserialized = system_deserialized._time_series_mgr.storage + assert isinstance(storage_deserialized, HDF5TimeSeriesStorage) + gen2 = system.get_component(SimpleGenerator, name="gen1") + time_series = system_deserialized.get_time_series(gen2) + assert np.array_equal(time_series.data, ts.data) + + +def test_h5_context_manager(system_with_h5_storage): + system = system_with_h5_storage + + bus = SimpleBus(name="test", voltage=1.1) + gen = SimpleGenerator(name="gen1", active_power=1.0, rating=1.0, bus=bus, available=True) + + system.add_component(gen) + + ts_name = "test_ts" + ts = SingleTimeSeries.from_array( + data=range(8784), + name=ts_name, + initial_timestamp=datetime(year=2020, month=1, day=1), + resolution=timedelta(hours=1), + ) + with pytest.raises(ISAlreadyAttached): + with system.open_time_series_store(mode="a"): + system.add_time_series(ts, gen, scenario="one", model_year="2030") + system.add_time_series(ts, gen, scenario="one", model_year="2030") + + # Not a single time series should have been added. 
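+    # (Assumption, for illustration: open_time_series_store batches writes in
+    # a single transaction, so when the second add raises ISAlreadyAttached,
+    # the first add is rolled back as well.)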
+ assert not system.has_time_series(gen, name=ts_name) diff --git a/tests/test_h5_utils.py b/tests/test_h5_utils.py new file mode 100644 index 0000000..33d9093 --- /dev/null +++ b/tests/test_h5_utils.py @@ -0,0 +1,287 @@ +"""Tests for HDF5 utility functions.""" + +import h5py +import numpy as np +import pytest + +from infrasys.utils.h5_utils import copy_h5_group, extract_h5_dataset_to_bytes, open_h5_file + + +@pytest.fixture +def h5_file_with_data(tmp_path): + """Create a temporary HDF5 file with test data.""" + file_path = tmp_path / "test.h5" + with h5py.File(str(file_path), "w") as f: + # Create a dataset + data = np.arange(100) + f.create_dataset("data", data=data) + f["data"].attrs["description"] = "Test data" + + # Create a group with nested data + group = f.create_group("group1") + group.create_dataset("nested_data", data=np.arange(50)) + group["nested_data"].attrs["type"] = "nested" + group.attrs["group_attr"] = "group value" + + # Create a subgroup + subgroup = group.create_group("subgroup") + subgroup.create_dataset("deep_data", data=np.array([1, 2, 3])) + + return file_path + + +def test_open_h5_file_with_path_object(tmp_path): + """Test opening HDF5 file with Path object.""" + file_path = tmp_path / "test.h5" + + # Create and close file first + with h5py.File(str(file_path), "w") as f: + f.create_dataset("data", data=[1, 2, 3]) + + # Test opening with Path object + f = open_h5_file(file_path, mode="r") + assert isinstance(f, h5py.File) + assert "data" in f + f.close() + + +def test_open_h5_file_with_string_path(tmp_path): + """Test opening HDF5 file with string path.""" + file_path = str(tmp_path / "test.h5") + + # Create and close file first + with h5py.File(file_path, "w") as f: + f.create_dataset("data", data=[1, 2, 3]) + + # Test opening with string path + f = open_h5_file(file_path, mode="r") + assert isinstance(f, h5py.File) + assert "data" in f + f.close() + + +def test_open_h5_file_create_mode(tmp_path): + """Test opening HDF5 file in create mode.""" + file_path = tmp_path / "new.h5" + + f = open_h5_file(file_path, mode="w") + assert isinstance(f, h5py.File) + f.create_dataset("test", data=[1, 2, 3]) + f.close() + + assert file_path.exists() + + +def test_extract_h5_dataset_to_bytes(h5_file_with_data): + """Test extracting dataset as bytes.""" + with h5py.File(str(h5_file_with_data), "r") as f: + result = extract_h5_dataset_to_bytes(f, "data") + + assert isinstance(result, bytes) + # Verify the data is correct + data = np.frombuffer(result, dtype=np.int64) + assert np.array_equal(data, np.arange(100)) + + +def test_extract_h5_dataset_preserves_attributes(h5_file_with_data): + """Test that extracted dataset respects attributes.""" + with h5py.File(str(h5_file_with_data), "r") as f: + result = extract_h5_dataset_to_bytes(f, "data") + assert isinstance(result, bytes) + + +def test_extract_h5_dataset_not_found(h5_file_with_data): + """Test extracting non-existent dataset raises error.""" + with h5py.File(str(h5_file_with_data), "r") as f: + with pytest.raises(KeyError): + extract_h5_dataset_to_bytes(f, "nonexistent") + + +def test_extract_h5_dataset_wrong_type(h5_file_with_data): + """Test extracting group instead of dataset raises TypeError.""" + with h5py.File(str(h5_file_with_data), "r") as f: + with pytest.raises(TypeError, match="Expected Dataset"): + extract_h5_dataset_to_bytes(f, "group1") + + +def test_copy_h5_group_single_dataset(tmp_path): + """Test copying a group with a single dataset.""" + src_file = tmp_path / "src.h5" + dst_file = tmp_path / "dst.h5" + + # 
Create source
+    with h5py.File(str(src_file), "w") as src:
+        src.create_dataset("data", data=np.arange(10))
+        src["data"].attrs["attr1"] = "value1"
+
+    # Reopen the source read-only; the handle from the creation block is
+    # closed once its ``with`` block exits.
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify copy
+    with h5py.File(str(dst_file), "r") as dst:
+        assert "data" in dst
+        data_set = dst["data"]
+        assert isinstance(data_set, h5py.Dataset)
+        assert np.array_equal(data_set[()], np.arange(10))
+        assert data_set.attrs["attr1"] == "value1"
+
+
+def test_copy_h5_group_nested_structure(tmp_path):
+    """Test copying nested group structure."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source with nested structure
+    with h5py.File(str(src_file), "w") as src:
+        src.create_dataset("root_data", data=[1, 2, 3])
+
+        group1 = src.create_group("group1")
+        group1.create_dataset("nested_data", data=[4, 5, 6])
+        group1.attrs["group_attr"] = "test"
+
+        subgroup = group1.create_group("subgroup")
+        subgroup.create_dataset("deep_data", data=[7, 8, 9])
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify nested copy
+    with h5py.File(str(dst_file), "r") as dst:
+        assert "root_data" in dst
+        root_data = dst["root_data"]
+        assert isinstance(root_data, h5py.Dataset)
+        assert np.array_equal(root_data[()], [1, 2, 3])
+
+        assert "group1" in dst
+        group1_obj = dst["group1"]
+        assert isinstance(group1_obj, h5py.Group)
+        assert "nested_data" in group1_obj
+        nested = group1_obj["nested_data"]
+        assert isinstance(nested, h5py.Dataset)
+        assert np.array_equal(nested[()], [4, 5, 6])
+        assert group1_obj.attrs["group_attr"] == "test"
+
+        assert "subgroup" in group1_obj
+        subgroup_obj = group1_obj["subgroup"]
+        assert isinstance(subgroup_obj, h5py.Group)
+        assert "deep_data" in subgroup_obj
+        deep_data = subgroup_obj["deep_data"]
+        assert isinstance(deep_data, h5py.Dataset)
+        assert np.array_equal(deep_data[()], [7, 8, 9])
+
+
+def test_copy_h5_group_multiple_datasets(tmp_path):
+    """Test copying group with multiple datasets."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source
+    with h5py.File(str(src_file), "w") as src:
+        src.create_dataset("data1", data=np.arange(5))
+        src.create_dataset("data2", data=np.arange(10, 20))
+        src.create_dataset("data3", data=["a", "b", "c"])
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify all datasets copied
+    with h5py.File(str(dst_file), "r") as dst:
+        assert len(dst.keys()) == 3
+        data1 = dst["data1"]
+        assert isinstance(data1, h5py.Dataset)
+        assert np.array_equal(data1[()], np.arange(5))
+
+        data2 = dst["data2"]
+        assert isinstance(data2, h5py.Dataset)
+        assert np.array_equal(data2[()], np.arange(10, 20))
+
+        data3 = dst["data3"]
+        assert isinstance(data3, h5py.Dataset)
+        assert np.array_equal(data3[()], np.array([b"a", b"b", b"c"]))
+
+
+def test_copy_h5_group_preserves_dataset_attributes(tmp_path):
+    """Test that copying preserves all dataset attributes."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source with attributes
+    with h5py.File(str(src_file), "w") as src:
+        dset = src.create_dataset("data", data=[1, 2, 3, 4, 5])
+        dset.attrs["description"] = "Test dataset"
+        dset.attrs["version"] = 1
+        dset.attrs["tags"] = np.array([10, 20, 30])
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify attributes
+    with h5py.File(str(dst_file), "r") as dst:
+        dst_data = dst["data"]
+        assert isinstance(dst_data, h5py.Dataset)
+        assert 
dst_data.attrs["description"] == "Test dataset"
+        assert dst_data.attrs["version"] == 1
+        assert np.array_equal(np.asarray(dst_data.attrs["tags"]), np.array([10, 20, 30]))
+
+
+def test_copy_h5_group_preserves_group_attributes(tmp_path):
+    """Test that copying preserves group attributes."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source with group attributes
+    with h5py.File(str(src_file), "w") as src:
+        group = src.create_group("mygroup")
+        group.create_dataset("data", data=[1, 2, 3])
+        group.attrs["group_name"] = "My Group"
+        group.attrs["group_id"] = 42
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify group attributes
+    with h5py.File(str(dst_file), "r") as dst:
+        mygroup = dst["mygroup"]
+        assert isinstance(mygroup, h5py.Group)
+        assert mygroup.attrs["group_name"] == "My Group"
+        assert mygroup.attrs["group_id"] == 42
+
+
+def test_copy_h5_group_empty_group(tmp_path):
+    """Test copying an empty group."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source with empty group
+    with h5py.File(str(src_file), "w") as src:
+        src.create_group("empty_group")
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify empty group exists
+    with h5py.File(str(dst_file), "r") as dst:
+        assert "empty_group" in dst
+        empty_group = dst["empty_group"]
+        assert isinstance(empty_group, h5py.Group)
+        assert len(empty_group.keys()) == 0
+
+
+def test_copy_h5_group_with_chunks(tmp_path):
+    """Test copying chunked datasets preserves chunk settings."""
+    src_file = tmp_path / "src.h5"
+    dst_file = tmp_path / "dst.h5"
+
+    # Create source with chunked dataset
+    with h5py.File(str(src_file), "w") as src:
+        src.create_dataset("chunked", data=np.arange(1000), chunks=(100,))
+
+    with h5py.File(str(src_file), "r") as src, h5py.File(str(dst_file), "w") as dst:
+        copy_h5_group(src, dst)
+
+    # Verify chunks are preserved
+    with h5py.File(str(dst_file), "r") as dst:
+        chunked = dst["chunked"]
+        assert isinstance(chunked, h5py.Dataset)
+        assert chunked.chunks == (100,)
+        assert np.array_equal(chunked[()], np.arange(1000))
diff --git a/tests/test_in_memory_storage.py b/tests/test_in_memory_storage.py
index 81564b9..31392dc 100644
--- a/tests/test_in_memory_storage.py
+++ b/tests/test_in_memory_storage.py
@@ -1,16 +1,19 @@
+from datetime import datetime, timedelta
+
+import numpy as np
+import pytest
+
+from infrasys.arrow_storage import ArrowTimeSeriesStorage
 from infrasys.chronify_time_series_storage import ChronifyTimeSeriesStorage
-from .models.simple_system import SimpleSystem, SimpleBus, SimpleGenerator
+from infrasys.exceptions import ISAlreadyAttached
+from infrasys.in_memory_time_series_storage import InMemoryTimeSeriesStorage
 from infrasys.time_series_models import (
-    SingleTimeSeries,
     NonSequentialTimeSeries,
+    SingleTimeSeries,
     TimeSeriesStorageType,
 )
-from infrasys.exceptions import ISAlreadyAttached
-from infrasys.arrow_storage import ArrowTimeSeriesStorage
-from infrasys.in_memory_time_series_storage import InMemoryTimeSeriesStorage
-from datetime import timedelta, datetime
-import numpy as np
-import pytest
+
+from .models.simple_system import SimpleBus, SimpleGenerator, SimpleSystem
 
 
 @pytest.mark.parametrize(
@@ -64,8 +67,8 @@ def test_convert_storage_single_time_series(
     test_time_series_data = SingleTimeSeries(
         data=np.arange(24),
         resolution=timedelta(hours=1),
-        initial_time=datetime(2020, 1, 1),
-        variable_name="load",
+        initial_timestamp=datetime(2020, 1, 1),
+        name="load",
     )
 
system.add_time_series(test_time_series_data, test_generator) with pytest.raises(ISAlreadyAttached): @@ -75,9 +78,7 @@ def test_convert_storage_single_time_series( assert isinstance(system._time_series_mgr._storage, new_stype) - ts2 = system.get_time_series( - test_generator, time_series_type=SingleTimeSeries, variable_name="load" - ) + ts2 = system.get_time_series(test_generator, time_series_type=SingleTimeSeries, name="load") assert np.array_equal(ts2.data_array, test_time_series_data.data_array) @@ -117,7 +118,7 @@ def test_convert_storage_nonsequential_time_series( test_time_series_data = NonSequentialTimeSeries( data=np.arange(24), timestamps=timestamps, - variable_name="load", + name="load", ) system.add_time_series(test_time_series_data, test_generator) with pytest.raises(ISAlreadyAttached): @@ -126,7 +127,7 @@ def test_convert_storage_nonsequential_time_series( assert isinstance(system._time_series_mgr._storage, new_stype) ts2 = system.get_time_series( - test_generator, time_series_type=NonSequentialTimeSeries, variable_name="load" + test_generator, time_series_type=NonSequentialTimeSeries, name="load" ) assert np.array_equal(ts2.data_array, test_time_series_data.data_array) assert np.array_equal(ts2.timestamps, test_time_series_data.timestamps) diff --git a/tests/test_nonsequential_time_series.py b/tests/test_nonsequential_time_series.py index cd18507..c191d37 100644 --- a/tests/test_nonsequential_time_series.py +++ b/tests/test_nonsequential_time_series.py @@ -2,8 +2,8 @@ from datetime import datetime, timedelta -import pytest import numpy as np +import pytest from infrasys.normalization import NormalizationMax from infrasys.quantities import ActivePower @@ -40,7 +40,7 @@ def test_nonsequential_time_series_attributes(data, timestamps, variable_name): length = 4 ts = NonSequentialTimeSeries.from_array( data=data, - variable_name=variable_name, + name=variable_name, timestamps=timestamps, ) assert isinstance(ts, NonSequentialTimeSeries) @@ -53,7 +53,7 @@ def test_invalid_sequence_length(data, timestamps, variable_name): """Check that time series has at least 2 elements.""" with pytest.raises(ValueError, match="length must be at least 2"): NonSequentialTimeSeries.from_array( - data=[data[0]], variable_name=variable_name, timestamps=[timestamps[0]] + data=[data[0]], name=variable_name, timestamps=[timestamps[0]] ) @@ -66,9 +66,7 @@ def test_duplicate_timestamps(data, variable_name): datetime(2020, 5, 20), ] with pytest.raises(ValueError, match="Timestamps must be unique"): - NonSequentialTimeSeries.from_array( - data=data, variable_name=variable_name, timestamps=timestamps - ) + NonSequentialTimeSeries.from_array(data=data, name=variable_name, timestamps=timestamps) def test_chronological_timestamps(data, variable_name): @@ -80,9 +78,7 @@ def test_chronological_timestamps(data, variable_name): datetime(2020, 5, 20), ] with pytest.raises(ValueError, match="chronological order"): - NonSequentialTimeSeries.from_array( - data=data, variable_name=variable_name, timestamps=timestamps - ) + NonSequentialTimeSeries.from_array(data=data, name=variable_name, timestamps=timestamps) def test_nonsequential_time_series_attributes_with_quantity( @@ -93,7 +89,7 @@ def test_nonsequential_time_series_attributes_with_quantity( ts = NonSequentialTimeSeries.from_array( data=quantity_data, - variable_name=variable_name, + name=variable_name, timestamps=timestamps, ) assert isinstance(ts, NonSequentialTimeSeries) @@ -109,7 +105,7 @@ def test_normalization(data, timestamps, variable_name): ts = 
NonSequentialTimeSeries.from_array( data=data, timestamps=timestamps, - variable_name=variable_name, + name=variable_name, normalization=NormalizationMax(), ) assert isinstance(ts, NonSequentialTimeSeries) @@ -125,7 +121,7 @@ def test_normalization_quantity(quantity_data, timestamps, variable_name): ts = NonSequentialTimeSeries.from_array( data=quantity_data, timestamps=timestamps, - variable_name=variable_name, + name=variable_name, normalization=NormalizationMax(), ) assert isinstance(ts, NonSequentialTimeSeries) diff --git a/tests/test_path_utils.py b/tests/test_path_utils.py index 10c1314..d58dd63 100644 --- a/tests/test_path_utils.py +++ b/tests/test_path_utils.py @@ -1,4 +1,6 @@ -from infrasys.utils.path_utils import delete_if_exists +import os + +from infrasys.utils.path_utils import clean_tmp_folder, delete_if_exists def test_delete_if_exists(tmp_path) -> None: @@ -14,3 +16,12 @@ def test_delete_if_exists(tmp_path) -> None: assert path.exists() assert delete_if_exists(path) assert not path.exists() + + +def test_clean_tmp_folder(tmp_path) -> None: + nested = tmp_path / "keep_me" / "child" + nested.mkdir(parents=True) + (nested / "file.txt").write_text("data") + + clean_tmp_folder(tmp_path / "keep_me") + assert not os.path.exists(tmp_path / "keep_me") diff --git a/tests/test_serialization.py b/tests/test_serialization.py index 36e2522..45d8f11 100644 --- a/tests/test_serialization.py +++ b/tests/test_serialization.py @@ -1,28 +1,32 @@ -import json -from pathlib import Path -import random import os +import random +import zipfile from datetime import datetime, timedelta +from pathlib import Path from typing import Type import numpy as np -from numpy._typing import NDArray +import orjson import pint import pytest +from numpy._typing import NDArray from pydantic import WithJsonSchema from typing_extensions import Annotated from infrasys import Location, SingleTimeSeries, NonSequentialTimeSeries, System from infrasys.component import Component -from infrasys.quantities import Distance, ActivePower -from infrasys.exceptions import ISOperationNotAllowed -from infrasys.normalization import NormalizationMax -from infrasys.time_series_models import TimeSeriesStorageType, TimeSeriesData +from infrasys.exceptions import ISInvalidParameter, ISOperationNotAllowed +from infrasys.quantities import ActivePower, Distance +from infrasys.time_series_models import ( + TimeSeriesData, + TimeSeriesStorageType, +) + from .models.simple_system import ( - SimpleSystem, SimpleBus, SimpleGenerator, SimpleSubsystem, + SimpleSystem, ) TS_STORAGE_OPTIONS = ( @@ -100,12 +104,12 @@ def test_serialize_single_time_series(tmp_path, time_series_storage_type): gen2 = SimpleGenerator(name="gen2", active_power=1.0, rating=1.0, bus=bus, available=True) system.add_components(bus, gen1, gen2) - variable_name = "active_power" + name = "active_power" length = 8784 data = range(length) start = datetime(year=2020, month=1, day=1) resolution = timedelta(hours=1) - ts = SingleTimeSeries.from_array(data, variable_name, start, resolution) + ts = SingleTimeSeries.from_array(data, name, start, resolution) system.add_time_series(ts, gen1, gen2, scenario="high", model_year="2030") filename = tmp_path / "system.json" system.to_json(filename) @@ -113,7 +117,7 @@ def test_serialize_single_time_series(tmp_path, time_series_storage_type): gen1b = system2.get_component(SimpleGenerator, gen1.name) gen2b = system2.get_component(SimpleGenerator, gen2.name) data2 = range(1, length + 1) - ts2 = SingleTimeSeries.from_array(data2, variable_name, 
start, resolution) + ts2 = SingleTimeSeries.from_array(data2, name, start, resolution) system2.add_time_series(ts2, gen1b, gen2b, scenario="low", model_year="2030") filename2 = tmp_path / "system2.json" system2.to_json(filename2) @@ -122,7 +126,7 @@ def test_serialize_single_time_series(tmp_path, time_series_storage_type): system3.get_time_series( gen1b, time_series_type=SingleTimeSeries, - variable_name=variable_name, + name=name, scenario="low", model_year="2030", ).data, @@ -132,22 +136,20 @@ def test_serialize_single_time_series(tmp_path, time_series_storage_type): system3.get_time_series( gen2b, time_series_type=SingleTimeSeries, - variable_name=variable_name, + name=name, scenario="low", model_year="2030", ).data, data2, ) - check_deserialize_with_read_only_time_series( - filename, gen1.name, gen2.name, variable_name, ts.data - ) + check_deserialize_with_read_only_time_series(filename, gen1.name, gen2.name, name, ts.data) def check_deserialize_with_read_only_time_series( filename, gen1_name: str, gen2_name: str, - variable_name: str, + name: str, expected_ts_data: NDArray | pint.Quantity, expected_ts_timestamps: NDArray | None = None, time_series_type: Type[TimeSeriesData] = SingleTimeSeries, @@ -158,11 +160,9 @@ def check_deserialize_with_read_only_time_series( assert system_ts_dir == SimpleSystem._make_time_series_directory(filename) gen1b = system.get_component(SimpleGenerator, gen1_name) with pytest.raises(ISOperationNotAllowed): - system.remove_time_series(gen1b, variable_name=variable_name) + system.remove_time_series(gen1b, name=name) - ts2 = system.get_time_series( - gen1b, time_series_type=time_series_type, variable_name=variable_name - ) + ts2 = system.get_time_series(gen1b, time_series_type=time_series_type, name=name) assert np.array_equal(ts2.data, expected_ts_data) if expected_ts_timestamps is not None: assert np.array_equal(ts2.timestamps, expected_ts_timestamps) @@ -177,15 +177,13 @@ def test_serialize_nonsequential_time_series(tmp_path, time_series_storage_type) gen2 = SimpleGenerator(name="gen2", active_power=1.0, rating=1.0, bus=bus, available=True) system.add_components(bus, gen1, gen2) - variable_name = "active_power" + name = "active_power" length = 10 data = range(length) timestamps = [ datetime(year=2030, month=1, day=1) + timedelta(seconds=5 * i) for i in range(length) ] - ts = NonSequentialTimeSeries.from_array( - data=data, variable_name=variable_name, timestamps=timestamps - ) + ts = NonSequentialTimeSeries.from_array(data=data, name=name, timestamps=timestamps) system.add_time_series(ts, gen1, gen2, scenario="high", model_year="2030") filename = tmp_path / "system.json" system.to_json(filename) @@ -195,7 +193,7 @@ def test_serialize_nonsequential_time_series(tmp_path, time_series_storage_type) filename, gen1.name, gen2.name, - variable_name, + name, ts.data, ts.timestamps, time_series_type=NonSequentialTimeSeries, @@ -246,8 +244,8 @@ def test_with_single_time_series_quantity(tmp_path): initial_time = datetime(year=2020, month=1, day=1) resolution = timedelta(hours=1) data = ActivePower(range(length), "watts") - variable_name = "active_power" - ts = SingleTimeSeries.from_array(data, variable_name, initial_time, resolution) + name = "active_power" + ts = SingleTimeSeries.from_array(data, name, initial_time, resolution) system.add_time_series(ts, gen) sys_file = tmp_path / "system.json" @@ -255,13 +253,11 @@ def test_with_single_time_series_quantity(tmp_path): system2 = SimpleSystem.from_json(sys_file) gen2 = system2.get_component(SimpleGenerator, gen.name) 
- ts2 = system2.get_time_series( - gen2, time_series_type=SingleTimeSeries, variable_name=variable_name - ) + ts2 = system2.get_time_series(gen2, time_series_type=SingleTimeSeries, name=name) assert isinstance(ts, SingleTimeSeries) assert ts.length == length assert ts.resolution == resolution - assert ts.initial_time == initial_time + assert ts.initial_timestamp == initial_time assert isinstance(ts2.data.magnitude, np.ndarray) assert np.array_equal(ts2.data.magnitude, np.array(range(length))) @@ -273,13 +269,11 @@ def test_with_nonsequential_time_series_quantity(tmp_path): system.add_components(gen) length = 10 data = ActivePower(range(length), "watts") - variable_name = "active_power" + name = "active_power" timestamps = [ datetime(year=2030, month=1, day=1) + timedelta(seconds=100 * i) for i in range(10) ] - ts = NonSequentialTimeSeries.from_array( - data=data, variable_name=variable_name, timestamps=timestamps - ) + ts = NonSequentialTimeSeries.from_array(data=data, name=name, timestamps=timestamps) system.add_time_series(ts, gen) sys_file = tmp_path / "system.json" @@ -287,9 +281,7 @@ def test_with_nonsequential_time_series_quantity(tmp_path): system2 = SimpleSystem.from_json(sys_file) gen2 = system2.get_component(SimpleGenerator, gen.name) - ts2 = system2.get_time_series( - gen2, time_series_type=NonSequentialTimeSeries, variable_name=variable_name - ) + ts2 = system2.get_time_series(gen2, time_series_type=NonSequentialTimeSeries, name=name) assert isinstance(ts, NonSequentialTimeSeries) assert ts.length == length assert isinstance(ts2.data.magnitude, np.ndarray) @@ -298,38 +290,9 @@ def test_with_nonsequential_time_series_quantity(tmp_path): assert np.array_equal(ts2.timestamps, np.array(timestamps)) -@pytest.mark.parametrize("storage_type", TS_STORAGE_OPTIONS) -def test_system_with_single_time_series_normalization(tmp_path, storage_type): - system = SimpleSystem( - name="test-system", - auto_add_composed_components=True, - time_series_storage_type=storage_type, - ) - gen = SimpleGenerator.example() - system.add_components(gen) - variable_name = "active_power" - length = 8784 - data = list(range(length)) - start = datetime(year=2020, month=1, day=1) - resolution = timedelta(hours=1) - ts = SingleTimeSeries.from_array( - data, variable_name, start, resolution, normalization=NormalizationMax() - ) - system.add_time_series(ts, gen) - filename = tmp_path / "sys.json" - system.to_json(filename) - - system2 = SimpleSystem.from_json(filename) - gen2 = system2.get_component(SimpleGenerator, gen.name) - ts2 = system2.get_time_series( - gen2, time_series_type=SingleTimeSeries, variable_name=variable_name - ) - assert ts2.normalization.max_value == length - 1 - - def test_json_schema(): schema = ComponentWithPintQuantity.model_json_schema() - assert isinstance(json.loads(json.dumps(schema)), dict) + assert isinstance(orjson.loads(orjson.dumps(schema)), dict) def test_system_save(tmp_path, simple_system_with_time_series): @@ -356,6 +319,162 @@ def test_system_save(tmp_path, simple_system_with_time_series): assert os.path.exists(zip_fpath), f"Zip file {zip_fpath} does not exists" +def test_system_load(tmp_path, simple_system_with_time_series): + """Test loading a system from a zip archive.""" + simple_system = simple_system_with_time_series + custom_folder = "load_test_system" + fpath = tmp_path / custom_folder + fname = "test_system.json" + + simple_system.save(fpath, filename=fname, zip=True) + zip_fpath = f"{fpath}.zip" + assert os.path.exists(zip_fpath), f"Zip file {zip_fpath} was not 
created" + assert not os.path.exists(fpath), f"Original folder {fpath} was not deleted" + + loaded_system = SimpleSystem.load(zip_fpath) + assert loaded_system is not None + assert loaded_system.name == simple_system.name + assert loaded_system.description == simple_system.description + + original_buses = list(simple_system.get_components(SimpleBus)) + loaded_buses = list(loaded_system.get_components(SimpleBus)) + assert len(loaded_buses) == len(original_buses) + + original_gens = list(simple_system.get_components(SimpleGenerator)) + loaded_gens = list(loaded_system.get_components(SimpleGenerator)) + assert len(loaded_gens) == len(original_gens) + + for orig_gen in original_gens: + loaded_gen = loaded_system.get_component(SimpleGenerator, orig_gen.name) + orig_ts_metadata = simple_system.list_time_series_metadata(orig_gen) + loaded_ts_metadata = loaded_system.list_time_series_metadata(loaded_gen) + assert len(loaded_ts_metadata) == len(orig_ts_metadata) + + +def test_system_load_errors(tmp_path): + """Test error handling in System.load().""" + with pytest.raises(FileNotFoundError, match="Zip file does not exist"): + SimpleSystem.load(tmp_path / "nonexistent.zip") + + fake_zip = tmp_path / "fake.zip" + fake_zip.write_text("This is not a zip file") + with pytest.raises(ISInvalidParameter, match="not a valid zip archive"): + SimpleSystem.load(fake_zip) + + empty_zip = tmp_path / "empty.zip" + with zipfile.ZipFile(empty_zip, "w") as zf: + zf.writestr("readme.txt", "No JSON here") + with pytest.raises(ISInvalidParameter, match="No JSON file found"): + SimpleSystem.load(empty_zip) + + +@pytest.mark.parametrize("time_series_storage_type", TS_STORAGE_OPTIONS) +def test_system_save_load_with_storage_backends(tmp_path, time_series_storage_type): + """Test save and load methods work correctly with different storage backends.""" + # Create a system with the specified storage backend + system = SimpleSystem( + name=f"test_system_{time_series_storage_type}", + description=f"Test system with {time_series_storage_type} storage", + auto_add_composed_components=True, + time_series_storage_type=time_series_storage_type, + ) + + # Add components + bus1 = SimpleBus(name="bus1", voltage=120.0) + bus2 = SimpleBus(name="bus2", voltage=240.0) + gen1 = SimpleGenerator(name="gen1", available=True, active_power=100.0, rating=150.0, bus=bus1) + gen2 = SimpleGenerator(name="gen2", available=True, active_power=200.0, rating=250.0, bus=bus2) + system.add_components(bus1, bus2, gen1, gen2) + + # Add time series data + length = 24 + data = list(range(length)) + start = datetime(year=2024, month=1, day=1) + resolution = timedelta(hours=1) + + ts1 = SingleTimeSeries.from_array(data, "max_active_power", start, resolution) + ts2 = SingleTimeSeries.from_array([x * 2 for x in data], "max_active_power", start, resolution) + + system.add_time_series(ts1, gen1) + system.add_time_series(ts2, gen2) + + save_dir = tmp_path / f"system_{time_series_storage_type}" + system.save(save_dir, filename="system.json", zip=True) + + zip_path = f"{save_dir}.zip" + assert os.path.exists(zip_path), f"Zip file not created for {time_series_storage_type}" + assert not os.path.exists(save_dir), ( + f"Original directory not deleted for {time_series_storage_type}" + ) + + # Load from zip + loaded_system = SimpleSystem.load(zip_path) + + # Verify system metadata + assert loaded_system.name == system.name + assert loaded_system.description == system.description + + # Verify components + loaded_buses = list(loaded_system.get_components(SimpleBus)) + 
loaded_gens = list(loaded_system.get_components(SimpleGenerator)) + assert len(loaded_buses) == 2 + assert len(loaded_gens) == 2 + + for orig_gen in [gen1, gen2]: + loaded_gen = loaded_system.get_component(SimpleGenerator, orig_gen.name) + + # Check time series exists + orig_ts_metadata = system.list_time_series_metadata(orig_gen) + loaded_ts_metadata = loaded_system.list_time_series_metadata(loaded_gen) + assert len(loaded_ts_metadata) == len(orig_ts_metadata) == 1 + + orig_ts = system.get_time_series(orig_gen, "max_active_power") + loaded_ts = loaded_system.get_time_series(loaded_gen, "max_active_power") + + assert len(loaded_ts.data) == len(orig_ts.data) == length + assert list(loaded_ts.data) == list(orig_ts.data) + assert loaded_ts.initial_timestamp == orig_ts.initial_timestamp + assert loaded_ts.resolution == orig_ts.resolution + + +def test_system_save_load_hdf5_backend(tmp_path): + """Test save and load methods work correctly with HDF5 storage backend.""" + system = SimpleSystem( + name="test_system_hdf5", + description="Test system with HDF5 storage", + auto_add_composed_components=True, + time_series_storage_type=TimeSeriesStorageType.HDF5, + ) + + bus1 = SimpleBus(name="bus1", voltage=120.0) + gen1 = SimpleGenerator(name="gen1", available=True, active_power=100.0, rating=150.0, bus=bus1) + system.add_components(bus1, gen1) + length = 24 + data = list(range(length)) + start = datetime(year=2024, month=1, day=1) + resolution = timedelta(hours=1) + + ts1 = SingleTimeSeries.from_array(data, "active_power", start, resolution) + system.add_time_series(ts1, gen1) + + # Save to zip + save_dir = tmp_path / "system_hdf5" + system.save(save_dir, filename="system.json", zip=True) + + zip_path = f"{save_dir}.zip" + assert os.path.exists(zip_path) + assert not os.path.exists(save_dir) + + # Load from zip + loaded_system = SimpleSystem.load(zip_path) + assert loaded_system.name == system.name + + loaded_gen = loaded_system.get_component(SimpleGenerator, gen1.name) + loaded_ts = loaded_system.get_time_series(loaded_gen, "active_power") + assert len(loaded_ts.data) == length + assert list(loaded_ts.data) == data + + def test_legacy_format(): # This file was save from v0.2.1 with test_with_time_series_quantity. # Ensure that we can deserialize it. 
@@ -368,12 +487,12 @@ def test_convert_chronify_storage_permanent(tmp_path): auto_add_composed_components=True, time_series_storage_type=TimeSeriesStorageType.ARROW ) system.add_components(gen) - variable_name = "active_power" + name = "active_power" length = 10 data = list(range(length)) start = datetime(year=2020, month=1, day=1) resolution = timedelta(hours=1) - ts = SingleTimeSeries.from_array(data, variable_name, start, resolution) + ts = SingleTimeSeries.from_array(data, name, start, resolution) system.add_time_series(ts, gen) system.convert_storage( time_series_storage_type=TimeSeriesStorageType.CHRONIFY, diff --git a/tests/test_single_time_series.py b/tests/test_single_time_series.py index 0a56a7e..bbb0351 100644 --- a/tests/test_single_time_series.py +++ b/tests/test_single_time_series.py @@ -2,8 +2,8 @@ from datetime import datetime, timedelta -import pytest import numpy as np +import pytest from infrasys.normalization import NormalizationMax from infrasys.quantities import ActivePower @@ -17,11 +17,11 @@ def test_single_time_series_attributes(): variable_name = "active_power" data = range(length) ts = SingleTimeSeries.from_array( - data=data, variable_name=variable_name, initial_time=start, resolution=resolution + data=data, name=variable_name, initial_timestamp=start, resolution=resolution ) assert ts.length == length assert ts.resolution == resolution - assert ts.initial_time == start + assert ts.initial_timestamp == start assert isinstance(ts.data, np.ndarray) assert ts.data[-1] == length - 1 @@ -37,7 +37,7 @@ def test_from_array_construction(): assert isinstance(ts, SingleTimeSeries) assert ts.length == length assert ts.resolution == resolution - assert ts.initial_time == start + assert ts.initial_timestamp == start assert isinstance(ts.data, np.ndarray) assert ts.data[-1] == length - 1 @@ -65,7 +65,7 @@ def test_from_time_array_constructor(): assert isinstance(ts, SingleTimeSeries) assert ts.length == length assert ts.resolution == resolution - assert ts.initial_time == initial_time + assert ts.initial_timestamp == initial_time assert isinstance(ts.data, np.ndarray) assert ts.data[-1] == length - 1 @@ -82,7 +82,7 @@ def test_with_quantity(): assert isinstance(ts, SingleTimeSeries) assert ts.length == length assert ts.resolution == resolution - assert ts.initial_time == initial_time + assert ts.initial_timestamp == initial_time assert isinstance(ts.data, ActivePower) assert ts.data[-1].magnitude == length - 1 diff --git a/tests/test_supplemental_attributes.py b/tests/test_supplemental_attributes.py index 5e219dd..9a0a03b 100644 --- a/tests/test_supplemental_attributes.py +++ b/tests/test_supplemental_attributes.py @@ -1,6 +1,8 @@ +from datetime import datetime, timedelta + import pytest -from infrasys import GeographicInfo, SupplementalAttribute +from infrasys import GeographicInfo, SingleTimeSeries, SupplementalAttribute from infrasys.exceptions import ISAlreadyAttached, ISNotStored, ISOperationNotAllowed from infrasys.quantities import Energy from infrasys.system import System @@ -146,3 +148,20 @@ def test_attribute_with_basequantity(tmp_path): gen2 = system2.get_component(SimpleGenerator, "gen1") attr2: Attribute = system.get_supplemental_attributes_with_component(gen2)[0] assert attr1 == attr2 + + +def test_supplemental_attributes_with_time_series(): + bus = SimpleBus(name="test-bus", voltage=1.1) + gen = SimpleGenerator(name="gen1", active_power=1.0, rating=1.0, bus=bus, available=True) + attr1 = Attribute(energy=Energy(10.0, "kWh")) + system = 
SimpleSystem(auto_add_composed_components=True) + data = range(100) + start = datetime(year=2020, month=1, day=1) + resolution = timedelta(hours=1) + ts = SingleTimeSeries.from_array(data, "active_power", start, resolution) + system.add_component(gen) + system.add_supplemental_attribute(gen, attr1) + system.add_time_series(ts, attr1) + + # Assert that we can run this + system.info() diff --git a/tests/test_system.py b/tests/test_system.py index 0dcdf4c..c5d9d63 100644 --- a/tests/test_system.py +++ b/tests/test_system.py @@ -1,28 +1,30 @@ import itertools -from datetime import timedelta, datetime +from datetime import datetime, timedelta from uuid import uuid4 import numpy as np import pytest +from infrasys import TIME_SERIES_ASSOCIATIONS_TABLE, Component, Location, SingleTimeSeries from infrasys.arrow_storage import ArrowTimeSeriesStorage from infrasys.chronify_time_series_storage import ChronifyTimeSeriesStorage from infrasys.exceptions import ( ISAlreadyAttached, + ISConflictingArguments, ISNotStored, ISOperationNotAllowed, - ISConflictingArguments, ) -from infrasys import Component, Location, SingleTimeSeries, NonSequentialTimeSeries from infrasys.quantities import ActivePower from infrasys.time_series_models import TimeSeriesKey, TimeSeriesStorageType +from infrasys.utils.time_utils import to_iso_8601 + from .models.simple_system import ( GeneratorBase, - SimpleSystem, + RenewableGenerator, SimpleBus, SimpleGenerator, SimpleSubsystem, - RenewableGenerator, + SimpleSystem, ) @@ -218,12 +220,10 @@ def test_single_time_series_attach_from_array(): resolution = timedelta(hours=1) ts = SingleTimeSeries.from_array(data, variable_name, start, resolution) system.add_time_series(ts, gen1, gen2) - assert system.has_time_series(gen1, variable_name=variable_name) - assert system.has_time_series(gen2, variable_name=variable_name) + assert system.has_time_series(gen1, name=variable_name) + assert system.has_time_series(gen2, name=variable_name) assert np.array_equal( - system.get_time_series( - gen1, time_series_type=SingleTimeSeries, variable_name=variable_name - ).data, + system.get_time_series(gen1, time_series_type=SingleTimeSeries, name=variable_name).data, ts.data, ) @@ -246,15 +246,15 @@ def test_single_time_series(): system.add_time_series(gen1, ts) # type: ignore system.add_time_series(ts, gen1, gen2) - assert system.has_time_series(gen1, variable_name=variable_name) - assert system.has_time_series(gen2, variable_name=variable_name) - assert system.get_time_series(gen1, variable_name=variable_name) == ts - system.remove_time_series(gen1, gen2, variable_name=variable_name) + assert system.has_time_series(gen1, name=variable_name) + assert system.has_time_series(gen2, name=variable_name) + assert system.get_time_series(gen1, name=variable_name) == ts + system.remove_time_series(gen1, gen2, name=variable_name) with pytest.raises(ISNotStored): - system.get_time_series(gen1, variable_name=variable_name) + system.get_time_series(gen1, name=variable_name) - assert not system.has_time_series(gen1, variable_name=variable_name) - assert not system.has_time_series(gen2, variable_name=variable_name) + assert not system.has_time_series(gen1, name=variable_name) + assert not system.has_time_series(gen2, name=variable_name) TS_STORAGE_OPTIONS = ( @@ -295,14 +295,14 @@ def test_time_series_retrieval(storage_type, use_quantity): assert len(system.list_time_series_metadata(gen)) == 4 assert len(system.list_time_series_metadata(gen, scenario="high", model_year="2035")) == 1 assert ( - 
system.list_time_series_metadata(gen, scenario="high", model_year="2035")[ - 0 - ].user_attributes["model_year"] + system.list_time_series_metadata(gen, scenario="high", model_year="2035")[0].features[ + "model_year" + ] == "2035" ) assert len(system.list_time_series_metadata(gen, scenario="low")) == 2 for metadata in system.list_time_series_metadata(gen, scenario="high"): - assert metadata.user_attributes["scenario"] == "high" + assert metadata.features["scenario"] == "high" assert all( np.equal( @@ -332,22 +332,20 @@ def test_time_series_retrieval(storage_type, use_quantity): with pytest.raises(ISAlreadyAttached): system.add_time_series(ts4, gen, scenario="low", model_year="2035") - assert system.has_time_series(gen, variable_name=variable_name) - assert system.has_time_series(gen, variable_name=variable_name, scenario="high") - assert system.has_time_series( - gen, variable_name=variable_name, scenario="high", model_year="2030" - ) - assert not system.has_time_series(gen, variable_name=variable_name, model_year="2036") + assert system.has_time_series(gen, name=variable_name) + assert system.has_time_series(gen, name=variable_name, scenario="high") + assert system.has_time_series(gen, name=variable_name, scenario="high", model_year="2030") + assert not system.has_time_series(gen, name=variable_name, model_year="2036") with pytest.raises(ISOperationNotAllowed): - system.get_time_series(gen, variable_name=variable_name, scenario="high") + system.get_time_series(gen, name=variable_name, scenario="high") with pytest.raises(ISNotStored): - system.get_time_series(gen, variable_name=variable_name, scenario="medium") - assert len(system.list_time_series(gen, variable_name=variable_name, scenario="high")) == 2 - assert len(system.list_time_series(gen, variable_name=variable_name)) == 4 - system.remove_time_series(gen, variable_name=variable_name, scenario="high") - assert len(system.list_time_series(gen, variable_name=variable_name)) == 2 - system.remove_time_series(gen, variable_name=variable_name) - assert not system.has_time_series(gen, variable_name=variable_name) + system.get_time_series(gen, name=variable_name, scenario="medium") + assert len(system.list_time_series(gen, name=variable_name, scenario="high")) == 2 + assert len(system.list_time_series(gen, name=variable_name)) == 4 + system.remove_time_series(gen, name=variable_name, scenario="high") + assert len(system.list_time_series(gen, name=variable_name)) == 2 + system.remove_time_series(gen, name=variable_name) + assert not system.has_time_series(gen, name=variable_name) @pytest.mark.parametrize("storage_type", TS_STORAGE_OPTIONS) @@ -368,7 +366,7 @@ def test_open_time_series_store(storage_type: TimeSeriesStorageType): time_series_arrays.append(ts) with system.open_time_series_store() as conn: for i in range(5): - ts = system.get_time_series(gen, variable_name=f"ts{i}", connection=conn) + ts = system.get_time_series(gen, name=f"ts{i}", context=conn) assert np.array_equal( system.get_time_series(gen, f"ts{i}").data, time_series_arrays[i].data ) @@ -396,15 +394,15 @@ def test_time_series_removal(): system.add_time_series(ts, gen, scenario="low", model_year="2030") system.add_time_series(ts, gen, scenario="low", model_year="2035") - system.remove_time_series(gen1, variable_name="active_power") - system.remove_time_series(gen1, variable_name="reactive_power") - assert not system.list_time_series(gen1, variable_name="active_power") - assert not system.list_time_series(gen1, variable_name="reactive_power") - assert 
system.list_time_series(gen2, variable_name="active_power") - assert system.list_time_series(gen2, variable_name="reactive_power") + system.remove_time_series(gen1, name="active_power") + system.remove_time_series(gen1, name="reactive_power") + assert not system.list_time_series(gen1, name="active_power") + assert not system.list_time_series(gen1, name="reactive_power") + assert system.list_time_series(gen2, name="active_power") + assert system.list_time_series(gen2, name="reactive_power") system.remove_time_series(gen2) - assert not system.list_time_series(gen2, variable_name="active_power") - assert not system.list_time_series(gen2, variable_name="reactive_power") + assert not system.list_time_series(gen2, name="active_power") + assert not system.list_time_series(gen2, name="reactive_power") def test_time_series_read_only(): @@ -444,8 +442,8 @@ def test_serialize_time_series_from_array(tmp_path): system2 = SimpleSystem.from_json(filename, time_series_read_only=True) gen1b = system2.get_component(SimpleGenerator, gen1.name) with pytest.raises(ISOperationNotAllowed): - system2.remove_time_series(gen1b, variable_name=variable_name) - ts2 = system.get_time_series(gen1b, variable_name=variable_name) + system2.remove_time_series(gen1b, name=variable_name) + ts2 = system.get_time_series(gen1b, name=variable_name) assert ts2.data.tolist() == list(data) @@ -469,47 +467,45 @@ def test_time_series_slices(storage_type): first_timestamp = start second_timestamp = start + resolution last_timestamp = start + (length - 1) * resolution - ts_tmp = system.time_series.get(gen, variable_name=variable_name) + ts_tmp = system.time_series.get(gen, name=variable_name) assert isinstance(ts_tmp, SingleTimeSeries) assert len(ts_tmp.data) == length - ts_tmp = system.time_series.get(gen, variable_name=variable_name, length=10) + ts_tmp = system.time_series.get(gen, name=variable_name, length=10) assert isinstance(ts_tmp, SingleTimeSeries) assert len(ts_tmp.data) == 10 - ts2 = system.time_series.get( - gen, variable_name=variable_name, start_time=second_timestamp, length=5 - ) + ts2 = system.time_series.get(gen, name=variable_name, start_time=second_timestamp, length=5) assert isinstance(ts2, SingleTimeSeries) assert len(ts2.data) == 5 assert ts2.data.tolist() == data[1:6] - ts_tmp = system.time_series.get(gen, variable_name=variable_name, start_time=second_timestamp) + ts_tmp = system.time_series.get(gen, name=variable_name, start_time=second_timestamp) assert isinstance(ts_tmp, SingleTimeSeries) assert len(ts_tmp.data) == len(data) - 1 with pytest.raises(ISConflictingArguments, match="is less than"): system.time_series.get( gen, - variable_name=variable_name, + name=variable_name, start_time=first_timestamp - ts.resolution, length=5, ) with pytest.raises(ISConflictingArguments, match="is too large"): system.time_series.get( gen, - variable_name=variable_name, + name=variable_name, start_time=last_timestamp + ts.resolution, length=5, ) with pytest.raises(ISConflictingArguments, match="conflicts with initial_time"): system.time_series.get( gen, - variable_name=variable_name, + name=variable_name, start_time=first_timestamp + timedelta(minutes=1), ) with pytest.raises(ISConflictingArguments, match=r"start_time.*length.*conflicts with"): system.time_series.get( gen, - variable_name=variable_name, + name=variable_name, start_time=second_timestamp, length=len(data), ) @@ -636,9 +632,9 @@ def test_time_series_metadata_sql(): system.add_time_series(ts2, gen2) rows = system.time_series.metadata_store.sql( f""" - SELECT 
component_type, time_series_type, component_uuid, time_series_uuid
-        FROM {system.time_series.metadata_store.TABLE_NAME}
-        WHERE component_uuid = '{gen1.uuid}'
+        SELECT owner_type, time_series_type, owner_uuid, time_series_uuid
+        FROM {TIME_SERIES_ASSOCIATIONS_TABLE}
+        WHERE owner_uuid = '{gen1.uuid}'
     """
     )
     assert len(rows) == 1
@@ -664,14 +660,14 @@ def test_time_series_metadata_list_rows():
     system.add_time_series(ts1, gen1)
     system.add_time_series(ts2, gen2)
     columns = [
-        "component_type",
+        "owner_type",
         "time_series_type",
-        "component_uuid",
+        "owner_uuid",
         "time_series_uuid",
     ]
     rows = system.time_series.metadata_store.list_rows(
         gen2,
-        variable_name=variable_name,
+        name=variable_name,
         time_series_type=SingleTimeSeries.__name__,
         columns=columns,
     )
@@ -720,13 +716,23 @@ def add_time_series(iteration, initial_time, resolution):
     assert ts_counts.time_series_count == 2 * 10
     assert (
         ts_counts.time_series_type_count[
-            ("SimpleGenerator", "SingleTimeSeries", "2020-01-01 02:00:00", "1:00:00")
+            (
+                "SimpleGenerator",
+                "SingleTimeSeries",
+                "2020-01-01T02:00:00",
+                to_iso_8601(timedelta(hours=1)),
+            )
         ]
         == 2
     )
     assert (
         ts_counts.time_series_type_count[
-            ("SimpleBus", "SingleTimeSeries", "2020-02-01 00:10:00", "0:05:00")
+            (
+                "SimpleBus",
+                "SingleTimeSeries",
+                "2020-02-01T00:10:00",
+                to_iso_8601(timedelta(minutes=5)),
+            )
         ]
         == 1
     )
@@ -736,6 +742,84 @@ def test_system_printing(simple_system_with_time_series):
     simple_system_with_time_series.info()
 
 
+def test_system_show_components(simple_system_with_time_series):
+    simple_system_with_time_series.show_components(SimpleBus)
+    simple_system_with_time_series.show_components(SimpleBus, show_uuid=True)
+    simple_system_with_time_series.show_components(SimpleBus, show_time_series=True)
+    simple_system_with_time_series.show_components(SimpleBus, show_supplemental=True)
+
+
+def test_system_info_renders_supplemental_attributes_table(
+    simple_system_with_supplemental_attributes,
+    capsys,
+):
+    """Test that supplemental attributes appear in the System info table."""
+    simple_system_with_supplemental_attributes.info()
+
+    output = capsys.readouterr().out
+
+    assert "Supplemental Attributes attached" in output
+    assert "3" in output  # Total count
+
+
+def test_system_info_no_table_when_empty(simple_system_with_time_series, capsys):
+    """Test that the supplemental attributes row still renders when none exist."""
+    simple_system_with_time_series.info()
+
+    output = capsys.readouterr().out
+    assert "Supplemental Attributes attached" in output
+
+
+def test_system_info_table_content_accuracy(
+    simple_system_with_supplemental_attributes,
+    capsys,
+):
+    """Test that the supplemental attributes table shows the correct count."""
+    system = simple_system_with_supplemental_attributes
+
+    assert system.get_num_supplemental_attributes() == 3
+
+    system.info()
+    output = capsys.readouterr().out
+
+    assert "Supplemental Attributes attached" in output
+    assert "3" in output
+
+
+def test_single_attribute_type(simple_system):
+    """Test info() with only one attribute type."""
+    from infrasys.location import GeographicInfo
+
+    bus = simple_system.get_component(SimpleBus, "test-bus")
+    attr = GeographicInfo.example()
+    simple_system.add_supplemental_attribute(bus, attr)
+
+    simple_system.info()  # Should not crash
+
+    counts = simple_system.get_supplemental_attribute_counts_by_type()
+    assert len(counts) == 1
+    assert counts[0]["type"] == "GeographicInfo"
+
+
+def test_many_supplemental_attributes(simple_system):
+    """Test info() with 50 supplemental attributes spread across 10 buses."""
+    from infrasys.location import GeographicInfo
+
+    components = []
+    for i in
range(10): + bus = SimpleBus(name=f"bus-{i}", voltage=1.1) + simple_system.add_component(bus) + components.append(bus) + + for i in range(50): + attr = GeographicInfo.example() + attr.geo_json["properties"]["name"] = f"Location-{i}" + simple_system.add_supplemental_attribute(components[i % 10], attr) + + simple_system.info() # Should not crash + assert simple_system.get_num_supplemental_attributes() == 50 + + def test_convert_chronify_to_arrow_in_deserialize(tmp_path): system = SimpleSystem(time_series_storage_type=TimeSeriesStorageType.CHRONIFY) assert isinstance(system.time_series.storage, ChronifyTimeSeriesStorage) @@ -780,7 +864,7 @@ def test_chronfiy_storage(): for expected_ts in time_series: actual_ts = system.get_time_series( - gen, time_series_type=SingleTimeSeries, variable_name=expected_ts.variable_name + gen, time_series_type=SingleTimeSeries, name=expected_ts.name ) assert np.array_equal(expected_ts.data, actual_ts.data) @@ -802,20 +886,20 @@ def test_bulk_add_time_series(): data = np.random.rand(length) name = f"test_ts_{length}_{i}" ts = SingleTimeSeries.from_array(data, name, initial_time, resolution) - key = system.add_time_series(ts, gen, connection=conn) + key = system.add_time_series(ts, gen, context=conn) keys.append(key) time_series.append(ts) for key in keys: - system.time_series.storage.check_timestamps(key, connection=conn.data_conn) + system.time_series.storage.check_timestamps(key, context=conn.data_context) with system.open_time_series_store() as conn: for expected_ts in time_series: actual_ts = system.get_time_series( gen, time_series_type=SingleTimeSeries, - variable_name=expected_ts.variable_name, - connection=conn, + name=expected_ts.name, + context=conn, ) assert np.array_equal(expected_ts.data, actual_ts.data) @@ -834,45 +918,8 @@ def test_bulk_add_time_series_with_rollback(storage_type: TimeSeriesStorageType) length = 10 data = np.random.rand(length) ts = SingleTimeSeries.from_array(data, ts_name, initial_time, resolution) - system.add_time_series(ts, gen, connection=conn) - assert system.has_time_series(gen, variable_name=ts_name) - system.add_time_series(ts, gen, connection=conn) - - assert not system.has_time_series(gen, variable_name=ts_name) - - -def test_time_series_uniqueness_queries(simple_system: SimpleSystem): - system = SimpleSystem(time_series_in_memory=True) - bus = SimpleBus(name="test-bus", voltage=1.1) - gen = SimpleGenerator(name="gen1", active_power=1.0, rating=1.0, bus=bus, available=True) - system.add_components(bus, gen) - variable_name = "active_power" - length = 24 - data = range(length) - start = datetime(year=2020, month=1, day=1) - resolution = timedelta(hours=1) - ts1 = SingleTimeSeries.from_array(data, variable_name, start, resolution) - system.add_time_series(ts1, gen) - - # This works because there is only one match. 
-    assert isinstance(system.get_time_series(gen), SingleTimeSeries)
-
-    length = 10
-    data = range(length)
-    timestamps = [
-        datetime(year=2030, month=1, day=1) + timedelta(seconds=5 * i) for i in range(length)
-    ]
-    ts2 = NonSequentialTimeSeries.from_array(
-        data=data, variable_name=variable_name, timestamps=timestamps
-    )
-    system.add_time_series(ts2, gen)
-    with pytest.raises(ISOperationNotAllowed):
-        system.get_time_series(gen)
+    system.add_time_series(ts, gen, context=conn)
+    assert system.has_time_series(gen, name=ts_name)
+    system.add_time_series(ts, gen, context=conn)
 
-    assert isinstance(
-        system.get_time_series(gen, time_series_type=SingleTimeSeries), SingleTimeSeries
-    )
-    assert isinstance(
-        system.get_time_series(gen, time_series_type=NonSequentialTimeSeries),
-        NonSequentialTimeSeries,
-    )
+    assert not system.has_time_series(gen, name=ts_name)
diff --git a/tests/test_time_series_metadata_store_migration.py b/tests/test_time_series_metadata_store_migration.py
new file mode 100644
index 0000000..b833295
--- /dev/null
+++ b/tests/test_time_series_metadata_store_migration.py
@@ -0,0 +1,98 @@
+import pytest
+
+from infrasys import TIME_SERIES_METADATA_TABLE
+from infrasys.migrations.db_migrations import (
+    metadata_store_needs_migration,
+    migrate_legacy_metadata_store,
+)
+from infrasys.time_series_metadata_store import TimeSeriesMetadataStore
+from infrasys.utils.sqlite import create_in_memory_db, execute
+
+from .models.simple_system import SimpleSystem
+
+
+@pytest.fixture
+def legacy_system(pytestconfig):
+    return pytestconfig.rootpath.joinpath("tests/data/legacy_system.json")
+
+
+@pytest.fixture(scope="function")
+def legacy_db():
+    legacy_columns = [
+        "id",
+        "time_series_uuid",
+        "time_series_type",
+        "initial_time",
+        "resolution",
+        "variable_name",
+        "component_uuid",
+        "component_type",
+        "user_attributes_hash",
+        "metadata",
+    ]
+    conn = create_in_memory_db()
+    schema_text = ",".join(legacy_columns)
+    cur = conn.cursor()
+    execute(cur, f"CREATE TABLE {TIME_SERIES_METADATA_TABLE}({schema_text})")
+    old_schema_data = (
+        1,
+        "33d47754-ff74-44d8-b279-2eac914d1d5e",
+        "SingleTimeSeries",
+        "2020-01-01 00:00:00",
+        "1:00:00",
+        "active_power",
+        "d65fa5b9-a735-4b79-b880-27a5058c533e",
+        "SimpleGenerator",
+        None,
+        '{"variable_name": "active_power", "initial_time": "2020-01-01T00:00:00", "resolution": "PT1H", "time_series_uuid": "33d47754-ff74-44d8-b279-2eac914d1d5e", "user_attributes": {}, "quantity_metadata": {"module": "infrasys.quantities", "quantity_type": "ActivePower", "units": "watt"}, "normalization": {"test":true}, "type": "SingleTimeSeries", "length": 10, "__metadata__": {"fields": {"module": "infrasys.time_series_models", "type": "SingleTimeSeriesMetadata", "serialized_type": "base"}}}',
+    )
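+    # A single representative row in the legacy layout; the metadata JSON
+    # still uses the old variable_name/user_attributes keys.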
+    placeholders = ", ".join("?" * len(old_schema_data))
+    execute(
+        cur,
+        f"INSERT INTO {TIME_SERIES_METADATA_TABLE} VALUES ({placeholders})",
+        old_schema_data,
+    )
+    conn.commit()
+    yield conn
+    conn.close()
+
+
+def test_metadata_version_detection():
+    conn = create_in_memory_db()
+    metadata_store = TimeSeriesMetadataStore(conn, initialize=True)
+
+    assert isinstance(metadata_store, TimeSeriesMetadataStore)
+    assert not metadata_store_needs_migration(conn)
+
+
+def test_migrate_old_system(legacy_system):
+    system = SimpleSystem.from_json(legacy_system)
+    conn = system._time_series_mgr._metadata_store._con
+    tables = [row[0] for row in conn.execute("SELECT name FROM sqlite_master WHERE type='table'")]
+    assert "time_series_associations" in tables
+
+
+def test_migrate_without_columns():
+    conn = create_in_memory_db()
+    conn.execute(f"CREATE TABLE {TIME_SERIES_METADATA_TABLE}(id, test)")
+    with pytest.raises(NotImplementedError):
+        migrate_legacy_metadata_store(conn)
+
+
+def test_migrating_schema_with_no_entries(caplog):
+    legacy_columns = [
+        "id",
+        "time_series_uuid",
+        "time_series_type",
+        "initial_time",
+        "resolution",
+        "variable_name",
+        "component_uuid",
+        "component_type",
+        "normalization",
+        "user_attributes_hash",
+        "metadata",
+    ]
+    conn = create_in_memory_db()
+    schema_text = ",".join(legacy_columns)
+    cur = conn.cursor()
+    execute(cur, f"CREATE TABLE {TIME_SERIES_METADATA_TABLE}({schema_text})")
+    conn.commit()
+    assert migrate_legacy_metadata_store(conn)
diff --git a/tests/test_time_utils.py b/tests/test_time_utils.py
index 5c2c32b..19f328f 100644
--- a/tests/test_time_utils.py
+++ b/tests/test_time_utils.py
@@ -3,7 +3,7 @@
 import pytest
 from dateutil.relativedelta import relativedelta
 
-from infrasys.utils.time_utils import from_iso_8601, to_iso_8601
+from infrasys.utils.time_utils import from_iso_8601, str_timedelta_to_iso_8601, to_iso_8601
 
 
 def test_to_iso_8601():
@@ -73,6 +73,21 @@ def test_duration_with_relative_delta():
     assert result == "P1Y"
 
 
+def test_str_timedelta_to_iso_8601():
+    str_delta = str(timedelta(hours=1))
+    result = str_timedelta_to_iso_8601(str_delta)
+    assert result
+    assert result == "P0DT1H"
+
+    str_delta = str(timedelta(minutes=30))
+    result = str_timedelta_to_iso_8601(str_delta)
+    assert result
+    assert result == "P0DT30M"
+
+    with pytest.raises(ValueError):
+        _ = str_timedelta_to_iso_8601("test")
+
+
 @pytest.mark.parametrize(
     "input_value, result",
     [
diff --git a/tests/test_utils_sqlite.py b/tests/test_utils_sqlite.py
new file mode 100644
index 0000000..b8deb17
--- /dev/null
+++ b/tests/test_utils_sqlite.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+import gc
+from pathlib import Path
+
+from infrasys.utils.sqlite import (
+    ManagedConnection,
+    backup,
+    create_in_memory_db,
+    execute,
+    restore,
+)
+
+
+def test_create_in_memory_db_is_managed_connection() -> None:
+    with create_in_memory_db() as con:
+        assert isinstance(con, ManagedConnection)
+        cur = con.cursor()
+        execute(cur, "CREATE TABLE test (id INTEGER)")
+        execute(cur, "INSERT INTO test VALUES (?)", (1,))
+        con.commit()
+
+    # Close is idempotent
+    con.close()
+
+
+def test_backup_and_restore(tmp_path: Path) -> None:
+    src = create_in_memory_db()
+    cur = src.cursor()
+    execute(cur, "CREATE TABLE t (val INTEGER)")
+    execute(cur, "INSERT INTO t VALUES (42)")
+    src.commit()
+
+    backup_file = tmp_path / "backup.db"
+    backup(src, backup_file)
+
+    dst = create_in_memory_db()
+    restore(dst, backup_file)
+    val = dst.execute("SELECT val FROM t").fetchone()[0]
+    assert val == 42
+
+    src.close()
+
dst.close() + + +def test_connection_auto_close_on_gc() -> None: + con: ManagedConnection | None = create_in_memory_db() + assert con is not None + assert con.__dict__.get("_closed", False) is False + # Explicitly invoke cleanup to exercise __del__ path. + con.__del__() # type: ignore[operator] + assert con.__dict__.get("_closed", False) is True + + con = None # noqa: PLW0642 + gc.collect() diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..4723599 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1794 @@ +version = 1 +revision = 3 +requires-python = ">=3.11, <3.14" +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version < '3.12'", +] + +[[package]] +name = "accessible-pygments" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/c1/bbac6a50d02774f91572938964c582fff4270eee73ab822a4aeea4d8b11b/accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872", size = 1377899, upload-time = "2024-05-10T11:23:10.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/3f/95338030883d8c8b91223b4e21744b04d11b161a3ef117295d8241f50ab4/accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7", size = 1395903, upload-time = "2024-05-10T11:23:08.421Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "asttokens" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = 
"2024-11-30T04:30:10.946Z" }, +] + +[[package]] +name = "autodoc-pydantic" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "sphinx" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/df/87120e2195f08d760bc5cf8a31cfa2381a6887517aa89453b23f1ae3354f/autodoc_pydantic-2.2.0-py3-none-any.whl", hash = "sha256:8c6a36fbf6ed2700ea9c6d21ea76ad541b621fbdf16b5a80ee04673548af4d95", size = 34001, upload-time = "2024-04-27T10:57:00.542Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "chronify" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "duckdb" }, + { name = "duckdb-engine" }, + { name = "loguru" }, + { name = "pandas" }, + { name = "pyarrow" }, + { name = "pydantic" }, + { name = "pytz" }, + { name = "rich" }, + { name = "sqlalchemy" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/32/e43b68e1da0e4b0db04b9456722c88300627f97ee6394f4fa9f27cea3f52/chronify-0.3.1.tar.gz", hash = "sha256:cd2a8828eef0d4fedff20e712226808f4f5b942f08af14dfc00b179900bc2213", size = 156989, upload-time = "2025-08-12T00:17:35.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/d5/7e15411513d6a9decb30a85a032946f0a4264033afdfb0160e4456f460c5/chronify-0.3.1-py3-none-any.whl", hash = "sha256:c4b64117b48b8e8337ed38f76a04f80aa25be5aed1cc419dcf95918f906496bc", size = 57494, upload-time = "2025-08-12T00:17:34.494Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = 
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { 
url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = 
"2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "duckdb" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/d7/ec014b351b6bb026d5f473b1d0ec6bd6ba40786b9abbf530b4c9041d9895/duckdb-1.1.3.tar.gz", hash = "sha256:68c3a46ab08836fe041d15dcbf838f74a990d551db47cb24ab1c4576fc19351c", size = 12240672, upload-time = "2024-11-04T14:03:28.533Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/57/d0/96127582230183dc36f1209d5e8e67f54b3459b3b9794603305d816f350a/duckdb-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4f0e2e5a6f5a53b79aee20856c027046fba1d73ada6178ed8467f53c3877d5e0", size = 15469495, upload-time = "2024-11-04T14:01:28.506Z" }, + { url = "https://files.pythonhosted.org/packages/70/07/b78b435f8fe85c23ee2d49a01dc9599bb4a272c40f2a6bf67ff75958bdad/duckdb-1.1.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:911d58c22645bfca4a5a049ff53a0afd1537bc18fedb13bc440b2e5af3c46148", size = 32318595, upload-time = 
"2024-11-04T14:01:31.182Z" }, + { url = "https://files.pythonhosted.org/packages/6c/d8/253b3483fc554daf72503ba0f112404f75be6bbd7ca7047e804873cbb182/duckdb-1.1.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:c443d3d502335e69fc1e35295fcfd1108f72cb984af54c536adfd7875e79cee5", size = 16934057, upload-time = "2024-11-04T14:01:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/f8/11/908a8fb73cef8304d3f4eab7f27cc489f6fd675f921d382c83c55253be86/duckdb-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a55169d2d2e2e88077d91d4875104b58de45eff6a17a59c7dc41562c73df4be", size = 18498214, upload-time = "2024-11-04T14:01:37.118Z" }, + { url = "https://files.pythonhosted.org/packages/bf/56/f627b6fcd4aa34015a15449d852ccb78d7cc6eda654aa20c1d378e99fa76/duckdb-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0767ada9f06faa5afcf63eb7ba1befaccfbcfdac5ff86f0168c673dd1f47aa", size = 20149376, upload-time = "2024-11-04T14:01:39.917Z" }, + { url = "https://files.pythonhosted.org/packages/b5/1d/c318dada688119b9ca975d431f9b38bde8dda41b6d18cc06e0dc52123788/duckdb-1.1.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51c6d79e05b4a0933672b1cacd6338f882158f45ef9903aef350c4427d9fc898", size = 18293289, upload-time = "2024-11-04T14:01:43.186Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/fd346444b270ffe52e06c1af1243eaae30ab651c1d59f51711e3502fd060/duckdb-1.1.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:183ac743f21c6a4d6adfd02b69013d5fd78e5e2cd2b4db023bc8a95457d4bc5d", size = 21622129, upload-time = "2024-11-04T14:01:45.851Z" }, + { url = "https://files.pythonhosted.org/packages/18/aa/804c1cf5077b6f17d752b23637d9ef53eaad77ea73ee43d4c12bff480e36/duckdb-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:a30dd599b8090ea6eafdfb5a9f1b872d78bac318b6914ada2d35c7974d643640", size = 10954756, upload-time = "2024-11-04T14:01:47.976Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ff/7ee500f4cff0d2a581c1afdf2c12f70ee3bf1a61041fea4d88934a35a7a3/duckdb-1.1.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:a433ae9e72c5f397c44abdaa3c781d94f94f4065bcbf99ecd39433058c64cb38", size = 15482881, upload-time = "2024-11-04T14:01:50.842Z" }, + { url = "https://files.pythonhosted.org/packages/28/16/dda10da6bde54562c3cb0002ca3b7678e3108fa73ac9b7509674a02c5249/duckdb-1.1.3-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:d08308e0a46c748d9c30f1d67ee1143e9c5ea3fbcccc27a47e115b19e7e78aa9", size = 32349440, upload-time = "2024-11-04T14:01:53.772Z" }, + { url = "https://files.pythonhosted.org/packages/2e/c2/06f7f7a51a1843c9384e1637abb6bbebc29367710ffccc7e7e52d72b3dd9/duckdb-1.1.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5d57776539211e79b11e94f2f6d63de77885f23f14982e0fac066f2885fcf3ff", size = 16953473, upload-time = "2024-11-04T14:01:56.367Z" }, + { url = "https://files.pythonhosted.org/packages/1a/84/9991221ef7dde79d85231f20646e1b12d645490cd8be055589276f62847e/duckdb-1.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e59087dbbb63705f2483544e01cccf07d5b35afa58be8931b224f3221361d537", size = 18491915, upload-time = "2024-11-04T14:01:59.518Z" }, + { url = "https://files.pythonhosted.org/packages/aa/76/330fe16f12b7ddda0c664ba9869f3afbc8773dbe17ae750121d407dc0f37/duckdb-1.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ebf5f60ddbd65c13e77cddb85fe4af671d31b851f125a4d002a313696af43f1", size = 20150288, upload-time = 
"2024-11-04T14:02:01.865Z" }, + { url = "https://files.pythonhosted.org/packages/c4/88/e4b08b7a5d08c0f65f6c7a6594de64431ce7df38d7258511417ba7989ad3/duckdb-1.1.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4ef7ba97a65bd39d66f2a7080e6fb60e7c3e41d4c1e19245f90f53b98e3ac32", size = 18296560, upload-time = "2024-11-04T14:02:04.242Z" }, + { url = "https://files.pythonhosted.org/packages/1a/32/011e6e3ce14375a1ba01a588c119ad82be757f847c6b60207e0762d9ec3a/duckdb-1.1.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f58db1b65593ff796c8ea6e63e2e144c944dd3d51c8d8e40dffa7f41693d35d3", size = 21635270, upload-time = "2024-11-04T14:02:06.511Z" }, + { url = "https://files.pythonhosted.org/packages/f2/eb/58d4e0eccdc7b3523c062d008ad9eef28edccf88591d1a78659c809fe6e8/duckdb-1.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:e86006958e84c5c02f08f9b96f4bc26990514eab329b1b4f71049b3727ce5989", size = 10955715, upload-time = "2024-11-04T14:02:09.122Z" }, + { url = "https://files.pythonhosted.org/packages/81/d1/2462492531d4715b2ede272a26519b37f21cf3f8c85b3eb88da5b7be81d8/duckdb-1.1.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0897f83c09356206ce462f62157ce064961a5348e31ccb2a557a7531d814e70e", size = 15483282, upload-time = "2024-11-04T14:02:11.853Z" }, + { url = "https://files.pythonhosted.org/packages/af/a5/ec595aa223b911a62f24393908a8eaf8e0ed1c7c07eca5008f22aab070bc/duckdb-1.1.3-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:cddc6c1a3b91dcc5f32493231b3ba98f51e6d3a44fe02839556db2b928087378", size = 32350342, upload-time = "2024-11-04T14:02:15.893Z" }, + { url = "https://files.pythonhosted.org/packages/08/27/e35116ab1ada5e54e52424e52d16ee9ae82db129025294e19c1d48a8b2b1/duckdb-1.1.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:1d9ab6143e73bcf17d62566e368c23f28aa544feddfd2d8eb50ef21034286f24", size = 16953863, upload-time = "2024-11-04T14:02:19.223Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ac/f2db3969a56cd96a3ba78b0fd161939322fb134bd07c98ecc7a7015d3efa/duckdb-1.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f073d15d11a328f2e6d5964a704517e818e930800b7f3fa83adea47f23720d3", size = 18494301, upload-time = "2024-11-04T14:02:22.299Z" }, + { url = "https://files.pythonhosted.org/packages/cf/66/d0be7c9518b1b92185018bacd851f977a101c9818686f667bbf884abcfbc/duckdb-1.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5724fd8a49e24d730be34846b814b98ba7c304ca904fbdc98b47fa95c0b0cee", size = 20150992, upload-time = "2024-11-04T14:02:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/47/ae/c2df66e3716705f48775e692a1b8accbf3dc6e2c27a0ae307fb4b063e115/duckdb-1.1.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51e7dbd968b393343b226ab3f3a7b5a68dee6d3fe59be9d802383bf916775cb8", size = 18297818, upload-time = "2024-11-04T14:02:27.994Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7e/10310b754b7ec3349c411a0a88ecbf327c49b5714e3d35200e69c13fb093/duckdb-1.1.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00cca22df96aa3473fe4584f84888e2cf1c516e8c2dd837210daec44eadba586", size = 21635169, upload-time = "2024-11-04T14:02:30.702Z" }, + { url = "https://files.pythonhosted.org/packages/83/be/46c0b89c9d4e1ba90af9bc184e88672c04d420d41342e4dc359c78d05981/duckdb-1.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:77f26884c7b807c7edd07f95cf0b00e6d47f0de4a534ac1706a58f8bc70d0d31", size = 10955826, upload-time = "2024-11-04T14:02:33.865Z" }, +] + 
+[[package]] +name = "duckdb-engine" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "duckdb" }, + { name = "packaging" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/d5/c0d8d0a4ca3ffea92266f33d92a375e2794820ad89f9be97cf0c9a9697d0/duckdb_engine-0.17.0.tar.gz", hash = "sha256:396b23869754e536aa80881a92622b8b488015cf711c5a40032d05d2cf08f3cf", size = 48054, upload-time = "2025-03-29T09:49:17.663Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/a2/e90242f53f7ae41554419b1695b4820b364df87c8350aa420b60b20cab92/duckdb_engine-0.17.0-py3-none-any.whl", hash = "sha256:3aa72085e536b43faab635f487baf77ddc5750069c16a2f8d9c6c3cb6083e979", size = 49676, upload-time = "2025-03-29T09:49:15.564Z" }, +] + +[[package]] +name = "executing" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, +] + +[[package]] +name = "filelock" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "flexcache" +version = "0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/b0/8a21e330561c65653d010ef112bf38f60890051d244ede197ddaa08e50c1/flexcache-0.3.tar.gz", hash = "sha256:18743bd5a0621bfe2cf8d519e4c3bfdf57a269c15d1ced3fb4b64e0ff4600656", size = 15816, upload-time = "2024-03-09T03:21:07.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/cd/c883e1a7c447479d6e13985565080e3fea88ab5a107c21684c813dba1875/flexcache-0.3-py3-none-any.whl", hash = "sha256:d43c9fea82336af6e0115e308d9d33a185390b8346a017564611f1466dcd2e32", size = 13263, upload-time = "2024-03-09T03:21:05.635Z" }, +] + +[[package]] +name = "flexparser" +version = "0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/99/b4de7e39e8eaf8207ba1a8fa2241dd98b2ba72ae6e16960d8351736d8702/flexparser-0.4.tar.gz", hash = "sha256:266d98905595be2ccc5da964fe0a2c3526fbbffdc45b65b3146d75db992ef6b2", size = 31799, upload-time = "2024-11-07T02:00:56.249Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/5e/3be305568fe5f34448807976dc82fc151d76c3e0e03958f34770286278c1/flexparser-0.4-py3-none-any.whl", hash = "sha256:3738b456192dcb3e15620f324c447721023c0293f6af9955b481e91d00179846", size = 27625, upload-time = "2024-11-07T02:00:54.523Z" }, +] + +[[package]] +name = "furo" +version = "2025.9.25" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "accessible-pygments" }, + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/29/ff3b83a1ffce74676043ab3e7540d398e0b1ce7660917a00d7c4958b93da/furo-2025.9.25.tar.gz", hash = "sha256:3eac05582768fdbbc2bdfa1cdbcdd5d33cfc8b4bd2051729ff4e026a1d7e0a98", size = 1662007, upload-time = "2025-09-25T21:37:19.221Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/69/964b55f389c289e16ba2a5dfe587c3c462aac09e24123f09ddf703889584/furo-2025.9.25-py3-none-any.whl", hash = "sha256:2937f68e823b8e37b410c972c371bc2b1d88026709534927158e0cb3fac95afe", size = 340409, upload-time = "2025-09-25T21:37:17.244Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + 
{ url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { 
url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, + { 
url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, +] + +[[package]] +name = "h5py" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/6a/0d79de0b025aa85dc8864de8e97659c94cf3d23148394a954dc5ca52f8c8/h5py-3.15.1.tar.gz", hash = "sha256:c86e3ed45c4473564de55aa83b6fc9e5ead86578773dfbd93047380042e26b69", size = 426236, upload-time = "2025-10-16T10:35:27.404Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/fd/8349b48b15b47768042cff06ad6e1c229f0a4bd89225bf6b6894fea27e6d/h5py-3.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aaa330bcbf2830150c50897ea5dcbed30b5b6d56897289846ac5b9e529ec243", size = 3434135, upload-time = "2025-10-16T10:33:47.954Z" }, + { url = "https://files.pythonhosted.org/packages/c1/b0/1c628e26a0b95858f54aba17e1599e7f6cd241727596cc2580b72cb0a9bf/h5py-3.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c970fb80001fffabb0109eaf95116c8e7c0d3ca2de854e0901e8a04c1f098509", size = 2870958, upload-time = "2025-10-16T10:33:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e3/c255cafc9b85e6ea04e2ad1bba1416baa1d7f57fc98a214be1144087690c/h5py-3.15.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80e5bb5b9508d5d9da09f81fd00abbb3f85da8143e56b1585d59bc8ceb1dba8b", size = 4504770, upload-time = "2025-10-16T10:33:54.357Z" }, + { url = "https://files.pythonhosted.org/packages/8b/23/4ab1108e87851ccc69694b03b817d92e142966a6c4abd99e17db77f2c066/h5py-3.15.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b849ba619a066196169763c33f9f0f02e381156d61c03e000bb0100f9950faf", size = 4700329, upload-time = "2025-10-16T10:33:57.616Z" }, + { url = "https://files.pythonhosted.org/packages/a4/e4/932a3a8516e4e475b90969bf250b1924dbe3612a02b897e426613aed68f4/h5py-3.15.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e7f6c841efd4e6e5b7e82222eaf90819927b6d256ab0f3aca29675601f654f3c", size = 
4152456, upload-time = "2025-10-16T10:34:00.843Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0a/f74d589883b13737021b2049ac796328f188dbb60c2ed35b101f5b95a3fc/h5py-3.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca8a3a22458956ee7b40d8e39c9a9dc01f82933e4c030c964f8b875592f4d831", size = 4617295, upload-time = "2025-10-16T10:34:04.154Z" }, + { url = "https://files.pythonhosted.org/packages/23/95/499b4e56452ef8b6c95a271af0dde08dac4ddb70515a75f346d4f400579b/h5py-3.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:550e51131376889656feec4aff2170efc054a7fe79eb1da3bb92e1625d1ac878", size = 2882129, upload-time = "2025-10-16T10:34:06.886Z" }, + { url = "https://files.pythonhosted.org/packages/ce/bb/cfcc70b8a42222ba3ad4478bcef1791181ea908e2adbd7d53c66395edad5/h5py-3.15.1-cp311-cp311-win_arm64.whl", hash = "sha256:b39239947cb36a819147fc19e86b618dcb0953d1cd969f5ed71fc0de60392427", size = 2477121, upload-time = "2025-10-16T10:34:09.579Z" }, + { url = "https://files.pythonhosted.org/packages/62/b8/c0d9aa013ecfa8b7057946c080c0c07f6fa41e231d2e9bd306a2f8110bdc/h5py-3.15.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:316dd0f119734f324ca7ed10b5627a2de4ea42cc4dfbcedbee026aaa361c238c", size = 3399089, upload-time = "2025-10-16T10:34:12.135Z" }, + { url = "https://files.pythonhosted.org/packages/a4/5e/3c6f6e0430813c7aefe784d00c6711166f46225f5d229546eb53032c3707/h5py-3.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51469890e58e85d5242e43aab29f5e9c7e526b951caab354f3ded4ac88e7b76", size = 2847803, upload-time = "2025-10-16T10:34:14.564Z" }, + { url = "https://files.pythonhosted.org/packages/00/69/ba36273b888a4a48d78f9268d2aee05787e4438557450a8442946ab8f3ec/h5py-3.15.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a33bfd5dfcea037196f7778534b1ff7e36a7f40a89e648c8f2967292eb6898e", size = 4914884, upload-time = "2025-10-16T10:34:18.452Z" }, + { url = "https://files.pythonhosted.org/packages/3a/30/d1c94066343a98bb2cea40120873193a4fed68c4ad7f8935c11caf74c681/h5py-3.15.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25c8843fec43b2cc368aa15afa1cdf83fc5e17b1c4e10cd3771ef6c39b72e5ce", size = 5109965, upload-time = "2025-10-16T10:34:21.853Z" }, + { url = "https://files.pythonhosted.org/packages/81/3d/d28172116eafc3bc9f5991b3cb3fd2c8a95f5984f50880adfdf991de9087/h5py-3.15.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a308fd8681a864c04423c0324527237a0484e2611e3441f8089fd00ed56a8171", size = 4561870, upload-time = "2025-10-16T10:34:26.69Z" }, + { url = "https://files.pythonhosted.org/packages/a5/83/393a7226024238b0f51965a7156004eaae1fcf84aa4bfecf7e582676271b/h5py-3.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f4a016df3f4a8a14d573b496e4d1964deb380e26031fc85fb40e417e9131888a", size = 5037161, upload-time = "2025-10-16T10:34:30.383Z" }, + { url = "https://files.pythonhosted.org/packages/cf/51/329e7436bf87ca6b0fe06dd0a3795c34bebe4ed8d6c44450a20565d57832/h5py-3.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:59b25cf02411bf12e14f803fef0b80886444c7fe21a5ad17c6a28d3f08098a1e", size = 2874165, upload-time = "2025-10-16T10:34:33.461Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/2d02b10a66747c54446e932171dd89b8b4126c0111b440e6bc05a7c852ec/h5py-3.15.1-cp312-cp312-win_arm64.whl", hash = "sha256:61d5a58a9851e01ee61c932bbbb1c98fe20aba0a5674776600fb9a361c0aa652", size = 2458214, upload-time = "2025-10-16T10:34:35.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/b3/40207e0192415cbff7ea1d37b9f24b33f6d38a5a2f5d18a678de78f967ae/h5py-3.15.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8440fd8bee9500c235ecb7aa1917a0389a2adb80c209fa1cc485bd70e0d94a5", size = 3376511, upload-time = "2025-10-16T10:34:38.596Z" }, + { url = "https://files.pythonhosted.org/packages/31/96/ba99a003c763998035b0de4c299598125df5fc6c9ccf834f152ddd60e0fb/h5py-3.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ab2219dbc6fcdb6932f76b548e2b16f34a1f52b7666e998157a4dfc02e2c4123", size = 2826143, upload-time = "2025-10-16T10:34:41.342Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c2/fc6375d07ea3962df7afad7d863fe4bde18bb88530678c20d4c90c18de1d/h5py-3.15.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8cb02c3a96255149ed3ac811eeea25b655d959c6dd5ce702c9a95ff11859eb5", size = 4908316, upload-time = "2025-10-16T10:34:44.619Z" }, + { url = "https://files.pythonhosted.org/packages/d9/69/4402ea66272dacc10b298cca18ed73e1c0791ff2ae9ed218d3859f9698ac/h5py-3.15.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:121b2b7a4c1915d63737483b7bff14ef253020f617c2fb2811f67a4bed9ac5e8", size = 5103710, upload-time = "2025-10-16T10:34:48.639Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f6/11f1e2432d57d71322c02a97a5567829a75f223a8c821764a0e71a65cde8/h5py-3.15.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59b0d63b318bf3cc06687def2b45afd75926bbc006f7b8cd2b1a231299fc8599", size = 4556042, upload-time = "2025-10-16T10:34:51.841Z" }, + { url = "https://files.pythonhosted.org/packages/18/88/3eda3ef16bfe7a7dbc3d8d6836bbaa7986feb5ff091395e140dc13927bcc/h5py-3.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e02fe77a03f652500d8bff288cbf3675f742fc0411f5a628fa37116507dc7cc0", size = 5030639, upload-time = "2025-10-16T10:34:55.257Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ea/fbb258a98863f99befb10ed727152b4ae659f322e1d9c0576f8a62754e81/h5py-3.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:dea78b092fd80a083563ed79a3171258d4a4d307492e7cf8b2313d464c82ba52", size = 2864363, upload-time = "2025-10-16T10:34:58.099Z" }, + { url = "https://files.pythonhosted.org/packages/5d/c9/35021cc9cd2b2915a7da3026e3d77a05bed1144a414ff840953b33937fb9/h5py-3.15.1-cp313-cp313-win_arm64.whl", hash = "sha256:c256254a8a81e2bddc0d376e23e2a6d2dc8a1e8a2261835ed8c1281a0744cd97", size = 2449570, upload-time = "2025-10-16T10:35:00.473Z" }, +] + +[[package]] +name = "h5py-stubs" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "h5py" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/a0/65cbd1422dbc6bcccb3c7f11755c44b67e487b01ccd7e098648fdf6bea6a/h5py_stubs-0.1.2.tar.gz", hash = "sha256:f984bf66bc2cce02fd89d91b64dd0489b0014b2e79f2be6dc56f0bf38ef4e759", size = 3074, upload-time = "2025-09-09T16:21:10.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/52/b8eea98dd5e9167ed9162a700cdb3040dfcf52e285bd33c548a6fb5f6a8c/h5py_stubs-0.1.2-py3-none-any.whl", hash = "sha256:22899b06f7cfe028ba8eabf9aebee79d6facd0aeaee965e65bc290df1360a5ca", size = 6383, upload-time = "2025-09-09T16:21:09.604Z" }, +] + +[[package]] +name = "identify" +version = "2.6.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = 
"sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "infrasys" +version = "1.0.0rc3" +source = { editable = "." 
}
+dependencies = [
+    { name = "h5py" },
+    { name = "loguru" },
+    { name = "numpy" },
+    { name = "orjson" },
+    { name = "pandas" },
+    { name = "pint" },
+    { name = "pyarrow" },
+    { name = "pydantic" },
+    { name = "python-dateutil" },
+    { name = "rich" },
+]
+
+[package.optional-dependencies]
+chronify = [
+    { name = "chronify" },
+]
+
+[package.dev-dependencies]
+dev = [
+    { name = "h5py-stubs" },
+    { name = "infrasys", extra = ["chronify"] },
+    { name = "ipython" },
+    { name = "mypy" },
+    { name = "pandas-stubs" },
+    { name = "pre-commit" },
+    { name = "pyarrow-stubs" },
+    { name = "pytest" },
+    { name = "pytest-cov" },
+    { name = "pytest-missing-modules" },
+    { name = "ruff" },
+    { name = "types-python-dateutil" },
+]
+docs = [
+    { name = "autodoc-pydantic" },
+    { name = "furo" },
+    { name = "ipython" },
+    { name = "myst-parser" },
+    { name = "pytest-missing-modules" },
+    { name = "sphinx" },
+    { name = "sphinx-click" },
+    { name = "sphinx-copybutton" },
+    { name = "sphinx-tabs" },
+    { name = "types-python-dateutil" },
+]
+
+[package.metadata]
+requires-dist = [
+    { name = "chronify", marker = "extra == 'chronify'", specifier = "~=0.3.1" },
+    { name = "h5py", specifier = ">=3.13,<4" },
+    { name = "loguru", specifier = ">=0.7,<0.8" },
+    { name = "numpy", specifier = ">=2,<3" },
+    { name = "orjson", specifier = ">=3.11.5,<4" },
+    { name = "pandas", specifier = ">=2,<3" },
+    { name = "pint", specifier = "~=0.23" },
+    { name = "pyarrow", specifier = ">=21,<23" },
+    { name = "pydantic", specifier = ">=2.12,<3" },
+    { name = "python-dateutil", specifier = ">=2.9,<3" },
+    { name = "rich", specifier = ">=13.7,<14" },
+]
+provides-extras = ["chronify"]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "h5py-stubs", specifier = ">=0.1.2" },
+    { name = "infrasys", extras = ["chronify"] },
+    { name = "ipython", specifier = ">=9.1.0" },
+    { name = "mypy", specifier = ">=1.13,<2" },
+    { name = "pandas-stubs" },
+    { name = "pre-commit" },
+    { name = "pyarrow-stubs" },
+    { name = "pytest" },
+    { name = "pytest-cov" },
+    { name = "pytest-missing-modules", specifier = ">=0.2.1" },
+    { name = "ruff" },
+    { name = "types-python-dateutil", specifier = ">=2.9.0.20241206" },
+]
+docs = [
+    { name = "autodoc-pydantic", specifier = "~=2.0" },
+    { name = "furo" },
+    { name = "ipython", specifier = ">=9.1.0" },
+    { name = "myst-parser" },
+    { name = "pytest-missing-modules", specifier = ">=0.2.1" },
+    { name = "sphinx" },
+    { name = "sphinx-click" },
+    { name = "sphinx-copybutton" },
+    { name = "sphinx-tabs", specifier = "~=3.4" },
+    { name = "types-python-dateutil", specifier = ">=2.9.0.20241206" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
+]
+
+[[package]]
+name = "ipython"
+version = "9.6.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "decorator" },
+    { name = "ipython-pygments-lexers" },
+    { name = "jedi" },
+    { name = "matplotlib-inline" },
+    { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+    { name = "prompt-toolkit" },
+    { name = "pygments" },
+    { name = "stack-data" },
+    { name = "traitlets" },
+    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170, upload-time = "2025-09-29T10:55:47.676Z" },
+]
+
+[[package]]
+name = "ipython-pygments-lexers"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "parso" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "loguru"
+version = "0.7.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "win32-setctime", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
+    { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
+    { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
+    { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
+    { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
+    { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
+    { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
+    { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
+    { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
+    { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
+    { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+    { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+    { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+    { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+    { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+    { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+    { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+    { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+    { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" },
+]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markdown-it-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
+]
+
+[[package]]
+name = "mypy"
+version = "1.18.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mypy-extensions" },
+    { name = "pathspec" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" },
+    { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" },
+    { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" },
+    { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" },
+    { url = "https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" },
+    { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" },
+    { url = "https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" },
+    { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" },
+    { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
+]
+
+[[package]]
+name = "myst-parser"
+version = "4.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "docutils" },
+    { name = "jinja2" },
+    { name = "markdown-it-py" },
+    { name = "mdit-py-plugins" },
+    { name = "pyyaml" },
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" },
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
+]
+
+[[package]]
+name = "numpy"
+version = "2.3.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7a/45/e80d203ef6b267aa29b22714fb558930b27960a0c5ce3c19c999232bb3eb/numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d", size = 21259253, upload-time = "2025-09-09T15:56:02.094Z" },
+    { url = "https://files.pythonhosted.org/packages/52/18/cf2c648fccf339e59302e00e5f2bc87725a3ce1992f30f3f78c9044d7c43/numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569", size = 14450980, upload-time = "2025-09-09T15:56:05.926Z" },
+    { url = "https://files.pythonhosted.org/packages/93/fb/9af1082bec870188c42a1c239839915b74a5099c392389ff04215dcee812/numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f", size = 5379709, upload-time = "2025-09-09T15:56:07.95Z" },
+    { url = "https://files.pythonhosted.org/packages/75/0f/bfd7abca52bcbf9a4a65abc83fe18ef01ccdeb37bfb28bbd6ad613447c79/numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125", size = 6913923, upload-time = "2025-09-09T15:56:09.443Z" },
+    { url = "https://files.pythonhosted.org/packages/79/55/d69adad255e87ab7afda1caf93ca997859092afeb697703e2f010f7c2e55/numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48", size = 14589591, upload-time = "2025-09-09T15:56:11.234Z" },
+    { url = "https://files.pythonhosted.org/packages/10/a2/010b0e27ddeacab7839957d7a8f00e91206e0c2c47abbb5f35a2630e5387/numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6", size = 16938714, upload-time = "2025-09-09T15:56:14.637Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/6b/12ce8ede632c7126eb2762b9e15e18e204b81725b81f35176eac14dc5b82/numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa", size = 16370592, upload-time = "2025-09-09T15:56:17.285Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/35/aba8568b2593067bb6a8fe4c52babb23b4c3b9c80e1b49dff03a09925e4a/numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30", size = 18884474, upload-time = "2025-09-09T15:56:20.943Z" },
+    { url = "https://files.pythonhosted.org/packages/45/fa/7f43ba10c77575e8be7b0138d107e4f44ca4a1ef322cd16980ea3e8b8222/numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57", size = 6599794, upload-time = "2025-09-09T15:56:23.258Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/a2/a4f78cb2241fe5664a22a10332f2be886dcdea8784c9f6a01c272da9b426/numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa", size = 13088104, upload-time = "2025-09-09T15:56:25.476Z" },
+    { url = "https://files.pythonhosted.org/packages/79/64/e424e975adbd38282ebcd4891661965b78783de893b381cbc4832fb9beb2/numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7", size = 10460772, upload-time = "2025-09-09T15:56:27.679Z" },
+    { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" },
+    { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" },
+    { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" },
+    { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" },
+    { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" },
+    { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" },
+    { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" },
+    { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" },
+    { url = "https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" },
+    { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" },
+    { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" },
+    { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" },
+    { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" },
+    { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" },
+    { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" },
+    { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" },
+    { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" },
+    { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/f2/7e0a37cfced2644c9563c529f29fa28acbd0960dde32ece683aafa6f4949/numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e", size = 21131019, upload-time = "2025-09-09T15:58:42.838Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/7e/3291f505297ed63831135a6cc0f474da0c868a1f31b0dd9a9f03a7a0d2ed/numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150", size = 14376288, upload-time = "2025-09-09T15:58:45.425Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/4b/ae02e985bdeee73d7b5abdefeb98aef1207e96d4c0621ee0cf228ddfac3c/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3", size = 5305425, upload-time = "2025-09-09T15:58:48.6Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/eb/9df215d6d7250db32007941500dc51c48190be25f2401d5b2b564e467247/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0", size = 6819053, upload-time = "2025-09-09T15:58:50.401Z" },
+    { url = "https://files.pythonhosted.org/packages/57/62/208293d7d6b2a8998a4a1f23ac758648c3c32182d4ce4346062018362e29/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e", size = 14420354, upload-time = "2025-09-09T15:58:52.704Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/0c/8e86e0ff7072e14a71b4c6af63175e40d1e7e933ce9b9e9f765a95b4e0c3/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db", size = 16760413, upload-time = "2025-09-09T15:58:55.027Z" },
+    { url = "https://files.pythonhosted.org/packages/af/11/0cc63f9f321ccf63886ac203336777140011fb669e739da36d8db3c53b98/numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc", size = 12971844, upload-time = "2025-09-09T15:58:57.359Z" },
+]
+
+[[package]]
+name = "orjson"
+version = "3.11.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fd/68/6b3659daec3a81aed5ab47700adb1a577c76a5452d35b91c88efee89987f/orjson-3.11.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9c8494625ad60a923af6b2b0bd74107146efe9b55099e20d7740d995f338fcd8", size = 245318, upload-time = "2025-12-06T15:54:02.355Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/00/92db122261425f61803ccf0830699ea5567439d966cbc35856fe711bfe6b/orjson-3.11.5-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:7bb2ce0b82bc9fd1168a513ddae7a857994b780b2945a8c51db4ab1c4b751ebc", size = 129491, upload-time = "2025-12-06T15:54:03.877Z" },
+    { url = "https://files.pythonhosted.org/packages/94/4f/ffdcb18356518809d944e1e1f77589845c278a1ebbb5a8297dfefcc4b4cb/orjson-3.11.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67394d3becd50b954c4ecd24ac90b5051ee7c903d167459f93e77fc6f5b4c968", size = 132167, upload-time = "2025-12-06T15:54:04.944Z" },
+    { url = "https://files.pythonhosted.org/packages/97/c6/0a8caff96f4503f4f7dd44e40e90f4d14acf80d3b7a97cb88747bb712d3e/orjson-3.11.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:298d2451f375e5f17b897794bcc3e7b821c0f32b4788b9bcae47ada24d7f3cf7", size = 130516, upload-time = "2025-12-06T15:54:06.274Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/63/43d4dc9bd9954bff7052f700fdb501067f6fb134a003ddcea2a0bb3854ed/orjson-3.11.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa5e4244063db8e1d87e0f54c3f7522f14b2dc937e65d5241ef0076a096409fd", size = 135695, upload-time = "2025-12-06T15:54:07.702Z" },
+    { url = "https://files.pythonhosted.org/packages/87/6f/27e2e76d110919cb7fcb72b26166ee676480a701bcf8fc53ac5d0edce32f/orjson-3.11.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1db2088b490761976c1b2e956d5d4e6409f3732e9d79cfa69f876c5248d1baf9", size = 139664, upload-time = "2025-12-06T15:54:08.828Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/f8/5966153a5f1be49b5fbb8ca619a529fde7bc71aa0a376f2bb83fed248bcd/orjson-3.11.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2ed66358f32c24e10ceea518e16eb3549e34f33a9d51f99ce23b0251776a1ef", size = 137289, upload-time = "2025-12-06T15:54:09.898Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/34/8acb12ff0299385c8bbcbb19fbe40030f23f15a6de57a9c587ebf71483fb/orjson-3.11.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2021afda46c1ed64d74b555065dbd4c2558d510d8cec5ea6a53001b3e5e82a9", size = 138784, upload-time = "2025-12-06T15:54:11.022Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/27/910421ea6e34a527f73d8f4ee7bdffa48357ff79c7b8d6eb6f7b82dd1176/orjson-3.11.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b42ffbed9128e547a1647a3e50bc88ab28ae9daa61713962e0d3dd35e820c125", size = 141322, upload-time = "2025-12-06T15:54:12.427Z" },
+    { url = "https://files.pythonhosted.org/packages/87/a3/4b703edd1a05555d4bb1753d6ce44e1a05b7a6d7c164d5b332c795c63d70/orjson-3.11.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8d5f16195bb671a5dd3d1dbea758918bada8f6cc27de72bd64adfbd748770814", size = 413612, upload-time = "2025-12-06T15:54:13.858Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/36/034177f11d7eeea16d3d2c42a1883b0373978e08bc9dad387f5074c786d8/orjson-3.11.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c0e5d9f7a0227df2927d343a6e3859bebf9208b427c79bd31949abcc2fa32fa5", size = 150993, upload-time = "2025-12-06T15:54:15.189Z" },
+    { url = "https://files.pythonhosted.org/packages/44/2f/ea8b24ee046a50a7d141c0227c4496b1180b215e728e3b640684f0ea448d/orjson-3.11.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:23d04c4543e78f724c4dfe656b3791b5f98e4c9253e13b2636f1af5d90e4a880", size = 141774, upload-time = "2025-12-06T15:54:16.451Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/12/cc440554bf8200eb23348a5744a575a342497b65261cd65ef3b28332510a/orjson-3.11.5-cp311-cp311-win32.whl", hash = "sha256:c404603df4865f8e0afe981aa3c4b62b406e6d06049564d58934860b62b7f91d", size = 135109, upload-time = "2025-12-06T15:54:17.73Z" },
+    { url = "https://files.pythonhosted.org/packages/a3/83/e0c5aa06ba73a6760134b169f11fb970caa1525fa4461f94d76e692299d9/orjson-3.11.5-cp311-cp311-win_amd64.whl", hash = "sha256:9645ef655735a74da4990c24ffbd6894828fbfa117bc97c1edd98c282ecb52e1", size = 133193, upload-time = "2025-12-06T15:54:19.426Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/35/5b77eaebc60d735e832c5b1a20b155667645d123f09d471db0a78280fb49/orjson-3.11.5-cp311-cp311-win_arm64.whl", hash = "sha256:1cbf2735722623fcdee8e712cbaaab9e372bbcb0c7924ad711b261c2eccf4a5c", size = 126830, upload-time = "2025-12-06T15:54:20.836Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" },
+    { url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" },
+    { url = "https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" },
+    { url = "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" },
+    { url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" },
+    { url = "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" },
+    { url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash = "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" },
+    { url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" },
+    { url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" },
+    { url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" },
+    { url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" },
+    { url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" },
+    { url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" },
+    { url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 141823, upload-time = "2025-12-06T15:54:56.331Z" },
+    { url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" },
+    { url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" },
+    { url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
+[[package]]
+name = "pandas"
+version = "2.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "numpy" },
+    { name = "python-dateutil" },
+    { name = "pytz" },
+    { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" },
+    { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
+    { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
+    { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
+    { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
+    { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" },
+    { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" },
+    { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" },
+    { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" },
+    { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size =
12048227, upload-time = "2025-09-29T23:22:24.343Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, + { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, + { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, +] + +[[package]] +name = "pandas-stubs" +version = "2.3.2.250926" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "types-pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/3b/32be58a125db39d0b5f62cc93795f32b5bb2915bd5c4a46f0e35171985e2/pandas_stubs-2.3.2.250926.tar.gz", hash = "sha256:c64b9932760ceefb96a3222b953e6a251321a9832a28548be6506df473a66406", size = 102147, upload-time = "2025-09-26T19:50:39.522Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/96/1e4a035eaf4dce9610aac6e43026d0c6baa05773daf6d21e635a4fe19e21/pandas_stubs-2.3.2.250926-py3-none-any.whl", hash = "sha256:81121818453dcfe00f45c852f4dceee043640b813830f6e7bd084a4ef7ff7270", size = 159995, upload-time = "2025-09-26T19:50:38.241Z" }, +] + +[[package]] +name = "parso" +version = "0.8.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = 
"2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, +] + +[[package]] +name = "pint" +version = "0.25" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flexcache" }, + { name = "flexparser" }, + { name = "platformdirs" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/cb/e6ffaf3d019e8501b1264dac529bf829ac2f1fe1d488cfcf67f1fccadacf/pint-0.25.tar.gz", hash = "sha256:22911a30d682ee0540d656571c19a7b1806ce00b2be88a16f67218108b7b8cc2", size = 253010, upload-time = "2025-08-15T19:49:12.72Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/cc/c528311d798e22ec884b816e8aa2989e0f1f28cdc8e5969e2be5f10bce85/pint-0.25-py3-none-any.whl", hash = "sha256:cc20ae3dff010b9bbea41fb80c2de008f683cc83512cea73633d55aead80aa1e", size = 305462, upload-time = "2025-08-15T19:49:11.083Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, +] + +[[package]] +name = "pyarrow" +version = "22.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, + { url = "https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348, upload-time = "2025-10-24T10:04:43.366Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, + { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, + { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = "2025-10-24T10:05:47.314Z" }, + { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, + { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629, upload-time = "2025-10-24T10:06:20.274Z" }, + { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783, upload-time = "2025-10-24T10:06:27.301Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999, upload-time = "2025-10-24T10:06:35.387Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601, upload-time = "2025-10-24T10:06:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050, upload-time = "2025-10-24T10:06:52.284Z" }, + { url = "https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877, upload-time = "2025-10-24T10:07:02.405Z" }, + { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099, upload-time = "2025-10-24T10:08:07.259Z" }, + { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685, upload-time = "2025-10-24T10:07:11.47Z" }, + { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158, upload-time = "2025-10-24T10:07:18.626Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060, upload-time = "2025-10-24T10:07:26.002Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395, upload-time = "2025-10-24T10:07:34.09Z" }, + { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216, upload-time = "2025-10-24T10:07:43.528Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552, upload-time = "2025-10-24T10:07:53.519Z" }, + { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504, upload-time = "2025-10-24T10:08:00.932Z" }, +] + +[[package]] +name = "pyarrow-stubs" +version = "20.0.0.20250928" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyarrow" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/5f/9520b0a5cd42b95a945b8ca3bc47f723fc7ec906b7a7de76f2d075d69911/pyarrow_stubs-20.0.0.20250928.tar.gz", hash = "sha256:e802b18e8e5fdf0a78afa05fae78f1456d861fcb1f95ec0234be5d6a5ecdcde2", size = 236588, upload-time = "2025-09-28T02:50:04.839Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/13/75c86a8ef61ea2c758c924318cf894dced2436b0f7aeb3c5f0fe9e4305b4/pyarrow_stubs-20.0.0.20250928-py3-none-any.whl", hash = "sha256:5389057a55db3c2662c05f22685a52e15e5effaf4345f41f12fb9b6b348647b9", size = 235745, upload-time = "2025-09-28T02:50:03.205Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = 
"sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = 
"2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = 
"2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", 
size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-missing-modules" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/69/5161e1cf918baccdac65db7cbef78e1cfe29ee3b83fcb4a2f3568d8573a0/pytest_missing_modules-0.2.1.tar.gz", hash = "sha256:747a6acb975904c79d26410f0e0119cfcf6144cfdc5c2a70cc1f0955a57458e1", size = 10192, upload-time = "2024-09-03T10:05:30.446Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/55/40d10c08a8e77d9ee549dfaa75d0056b4328374956e124cbdd4b77b73f78/pytest_missing_modules-0.2.1-py3-none-any.whl", hash = "sha256:ad8827b4ab01049dd552bbd2875b844dacb1e6c61e01d216f90f566e9096d14b", size = 5536, upload-time = "2024-09-03T10:05:29.498Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = 
"sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" 
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "charset-normalizer" },
+    { name = "idna" },
+    { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
+[[package]]
+name = "rich"
+version = "13.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markdown-it-py" },
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b3/01/c954e134dc440ab5f96952fe52b4fdc64225530320a910473c1fe270d9aa/rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432", size = 221248, upload-time = "2024-02-28T14:51:19.472Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", size = 240681, upload-time = "2024-02-28T14:51:14.353Z" },
+]
+
+[[package]]
+name = "roman-numerals-py"
+version = "3.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" },
+]
+
+[[package]]
+name = "ruff"
+version = "0.13.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/8e/f9f9ca747fea8e3ac954e3690d4698c9737c23b51731d02df999c150b1c9/ruff-0.13.3.tar.gz", hash = "sha256:5b0ba0db740eefdfbcce4299f49e9eaefc643d4d007749d77d047c2bab19908e", size = 5438533, upload-time = "2025-10-02T19:29:31.582Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/33/8f7163553481466a92656d35dea9331095122bb84cf98210bef597dd2ecd/ruff-0.13.3-py3-none-linux_armv6l.whl", hash = "sha256:311860a4c5e19189c89d035638f500c1e191d283d0cc2f1600c8c80d6dcd430c", size = 12484040, upload-time = "2025-10-02T19:28:49.199Z" },
+    { url = "https://files.pythonhosted.org/packages/b0/b5/4a21a4922e5dd6845e91896b0d9ef493574cbe061ef7d00a73c61db531af/ruff-0.13.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2bdad6512fb666b40fcadb65e33add2b040fc18a24997d2e47fee7d66f7fcae2", size = 13122975, upload-time = "2025-10-02T19:28:52.446Z" },
+    { url = "https://files.pythonhosted.org/packages/40/90/15649af836d88c9f154e5be87e64ae7d2b1baa5a3ef317cb0c8fafcd882d/ruff-0.13.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fc6fa4637284708d6ed4e5e970d52fc3b76a557d7b4e85a53013d9d201d93286", size = 12346621, upload-time = "2025-10-02T19:28:54.712Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/42/bcbccb8141305f9a6d3f72549dd82d1134299177cc7eaf832599700f95a7/ruff-0.13.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c9e6469864f94a98f412f20ea143d547e4c652f45e44f369d7b74ee78185838", size = 12574408, upload-time = "2025-10-02T19:28:56.679Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/19/0f3681c941cdcfa2d110ce4515624c07a964dc315d3100d889fcad3bfc9e/ruff-0.13.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bf62b705f319476c78891e0e97e965b21db468b3c999086de8ffb0d40fd2822", size = 12285330, upload-time = "2025-10-02T19:28:58.79Z" },
+    { url = "https://files.pythonhosted.org/packages/10/f8/387976bf00d126b907bbd7725219257feea58650e6b055b29b224d8cb731/ruff-0.13.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cc1abed87ce40cb07ee0667ce99dbc766c9f519eabfd948ed87295d8737c60", size = 13980815, upload-time = "2025-10-02T19:29:01.577Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/a6/7c8ec09d62d5a406e2b17d159e4817b63c945a8b9188a771193b7e1cc0b5/ruff-0.13.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4fb75e7c402d504f7a9a259e0442b96403fa4a7310ffe3588d11d7e170d2b1e3", size = 14987733, upload-time = "2025-10-02T19:29:04.036Z" },
+    { url = "https://files.pythonhosted.org/packages/97/e5/f403a60a12258e0fd0c2195341cfa170726f254c788673495d86ab5a9a9d/ruff-0.13.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b951f9d9afb39330b2bdd2dd144ce1c1335881c277837ac1b50bfd99985ed3", size = 14439848, upload-time = "2025-10-02T19:29:06.684Z" },
+    { url = "https://files.pythonhosted.org/packages/39/49/3de381343e89364c2334c9f3268b0349dc734fc18b2d99a302d0935c8345/ruff-0.13.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6052f8088728898e0a449f0dde8fafc7ed47e4d878168b211977e3e7e854f662", size = 13421890, upload-time = "2025-10-02T19:29:08.767Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/b5/c0feca27d45ae74185a6bacc399f5d8920ab82df2d732a17213fb86a2c4c/ruff-0.13.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc742c50f4ba72ce2a3be362bd359aef7d0d302bf7637a6f942eaa763bd292af", size = 13444870, upload-time = "2025-10-02T19:29:11.234Z" },
+    { url = "https://files.pythonhosted.org/packages/50/a1/b655298a1f3fda4fdc7340c3f671a4b260b009068fbeb3e4e151e9e3e1bf/ruff-0.13.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:8e5640349493b378431637019366bbd73c927e515c9c1babfea3e932f5e68e1d", size = 13691599, upload-time = "2025-10-02T19:29:13.353Z" },
+    { url = "https://files.pythonhosted.org/packages/32/b0/a8705065b2dafae007bcae21354e6e2e832e03eb077bb6c8e523c2becb92/ruff-0.13.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b139f638a80eae7073c691a5dd8d581e0ba319540be97c343d60fb12949c8d0", size = 12421893, upload-time = "2025-10-02T19:29:15.668Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/1e/cbe7082588d025cddbb2f23e6dfef08b1a2ef6d6f8328584ad3015b5cebd/ruff-0.13.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6b547def0a40054825de7cfa341039ebdfa51f3d4bfa6a0772940ed351d2746c", size = 12267220, upload-time = "2025-10-02T19:29:17.583Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/99/4086f9c43f85e0755996d09bdcb334b6fee9b1eabdf34e7d8b877fadf964/ruff-0.13.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9cc48a3564423915c93573f1981d57d101e617839bef38504f85f3677b3a0a3e", size = 13177818, upload-time = "2025-10-02T19:29:19.943Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/de/7b5db7e39947d9dc1c5f9f17b838ad6e680527d45288eeb568e860467010/ruff-0.13.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1a993b17ec03719c502881cb2d5f91771e8742f2ca6de740034433a97c561989", size = 13618715, upload-time = "2025-10-02T19:29:22.527Z" },
+    { url = "https://files.pythonhosted.org/packages/28/d3/bb25ee567ce2f61ac52430cf99f446b0e6d49bdfa4188699ad005fdd16aa/ruff-0.13.3-py3-none-win32.whl", hash = "sha256:f14e0d1fe6460f07814d03c6e32e815bff411505178a1f539a38f6097d3e8ee3", size = 12334488, upload-time = "2025-10-02T19:29:24.782Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/49/12f5955818a1139eed288753479ba9d996f6ea0b101784bb1fe6977ec128/ruff-0.13.3-py3-none-win_amd64.whl", hash = "sha256:621e2e5812b691d4f244638d693e640f188bacbb9bc793ddd46837cea0503dd2", size = 13455262, upload-time = "2025-10-02T19:29:26.882Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/72/7b83242b26627a00e3af70d0394d68f8f02750d642567af12983031777fc/ruff-0.13.3-py3-none-win_arm64.whl", hash = "sha256:9e9e9d699841eaf4c2c798fa783df2fabc680b72059a02ca0ed81c460bc58330", size = 12538484, upload-time = "2025-10-02T19:29:28.951Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "3.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" },
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.8"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" },
+]
+
+[[package]]
+name = "sphinx"
+version = "8.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "alabaster" },
+    { name = "babel" },
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "docutils" },
+    { name = "imagesize" },
+    { name = "jinja2" },
+    { name = "packaging" },
+    { name = "pygments" },
+    { name = "requests" },
+    { name = "roman-numerals-py" },
+    { name = "snowballstemmer" },
+    { name = "sphinxcontrib-applehelp" },
+    { name = "sphinxcontrib-devhelp" },
+    { name = "sphinxcontrib-htmlhelp" },
+    { name = "sphinxcontrib-jsmath" },
+    { name = "sphinxcontrib-qthelp" },
+    { name = "sphinxcontrib-serializinghtml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" },
+]
+
+[[package]]
+name = "sphinx-basic-ng"
+version = "1.0.0b2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736, upload-time = "2023-07-08T18:40:54.166Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496, upload-time = "2023-07-08T18:40:52.659Z" },
+]
+
+[[package]]
+name = "sphinx-click"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click" },
+    { name = "docutils" },
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/de/4b/c433ea57136eac0ccb8d76d33355783f1e6e77f1f13dc7d8f15dba2dc024/sphinx_click-6.1.0.tar.gz", hash = "sha256:c702e0751c1a0b6ad649e4f7faebd0dc09a3cc7ca3b50f959698383772f50eef", size = 26855, upload-time = "2025-09-11T11:05:45.53Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/88/95/a2fa680f02ee9cbe4532169d2e60b102fe415b6cfa25584ac2d112e4c43b/sphinx_click-6.1.0-py3-none-any.whl", hash = "sha256:7dbed856c3d0be75a394da444850d5fc7ecc5694534400aa5ed4f4849a8643f9", size = 8931, upload-time = "2025-09-11T11:05:43.897Z" },
+]
+
+[[package]]
+name = "sphinx-copybutton"
+version = "0.5.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/2b/a964715e7f5295f77509e59309959f4125122d648f86b4fe7d70ca1d882c/sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd", size = 23039, upload-time = "2023-04-14T08:10:22.998Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/9e/48/1ea60e74949eecb12cdd6ac43987f9fd331156388dcc2319b45e2ebb81bf/sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e", size = 13343, upload-time = "2023-04-14T08:10:20.844Z" },
+]
+
+[[package]]
+name = "sphinx-tabs"
+version = "3.4.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "docutils" },
+    { name = "pygments" },
+    { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6a/53/a9a91995cb365e589f413b77fc75f1c0e9b4ac61bfa8da52a779ad855cc0/sphinx-tabs-3.4.7.tar.gz", hash = "sha256:991ad4a424ff54119799ba1491701aa8130dd43509474aef45a81c42d889784d", size = 15891, upload-time = "2024-10-08T13:37:27.887Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6b/c6/f47505b564b918a3ba60c1e99232d4942c4a7e44ecaae603e829e3d05dae/sphinx_tabs-3.4.7-py3-none-any.whl", hash = "sha256:c12d7a36fd413b369e9e9967a0a4015781b71a9c393575419834f19204bd1915", size = 9727, upload-time = "2024-10-08T13:37:26.192Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" },
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.37"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3b/20/93ea2518df4d7a14ebe9ace9ab8bb92aaf7df0072b9007644de74172b06c/sqlalchemy-2.0.37.tar.gz", hash = "sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb", size = 9626249, upload-time = "2025-01-09T22:43:25.981Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7c/37/4915290c1849337be6d24012227fb3c30c575151eec2b182ee5f45e96ce7/SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c", size = 2104098, upload-time = "2025-01-10T00:32:29.975Z" },
+    { url = "https://files.pythonhosted.org/packages/4c/f5/8cce9196434014a24cc65f6c68faa9a887080932361ee285986c0a35892d/SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5", size = 2094492, upload-time = "2025-01-10T00:32:32.697Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/54/2df4b3d0d11b384b6e9a8788d0f1123243f2d2356e2ccf626f93dcc1a09f/SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8", size = 3212789, upload-time = "2025-01-10T02:42:56.584Z" },
+    { url = "https://files.pythonhosted.org/packages/57/4f/e1db9475f940f1c54c365ed02d4f6390f884fc95a6a4022ece7725956664/SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b", size = 3212784, upload-time = "2025-01-10T00:58:09.639Z" },
+    { url = "https://files.pythonhosted.org/packages/89/57/d93212e827d1f03a6cd4d0ea13775957c2a95161330fa47449b91153bd09/SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087", size = 3149616, upload-time = "2025-01-10T02:42:58.816Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/c2/759347419f69cf0bbb76d330fbdbd24cefb15842095fe86bca623759b9e8/SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9", size = 3169944, upload-time = "2025-01-10T00:58:12.998Z" },
+    { url = "https://files.pythonhosted.org/packages/22/04/a19ecb53aa19bb8cf491ecdb6bf8c1ac74959cd4962e119e91d4e2b8ecaa/SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989", size = 2074686, upload-time = "2025-01-09T22:59:12.557Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/9d/6e030cc2c675539dbc5ef73aa97a3cbe09341e27ad38caed2b70c4273aff/SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = "sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba", size = 2099891, upload-time = "2025-01-09T22:59:15.253Z" },
+    { url = "https://files.pythonhosted.org/packages/86/62/e5de4a5e0c4f5ceffb2b461aaa2378c0ee00642930a8c38e5b80338add0f/SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef", size = 2102692, upload-time = "2025-01-10T00:36:41.573Z" },
+    { url = "https://files.pythonhosted.org/packages/01/44/3b65f4f16abeffd611da0ebab9e3aadfca45d041a78a67835c41c6d28289/SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4", size = 2093079, upload-time = "2025-01-10T00:36:44.98Z" },
+    { url = "https://files.pythonhosted.org/packages/a4/d8/e3a6622e86e3ae3a41ba470d1bb095c1f2dedf6b71feae0b4b94b5951017/SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4", size = 3242509, upload-time = "2025-01-10T02:36:54.407Z" },
+    { url = "https://files.pythonhosted.org/packages/3a/ef/5a53a6a60ac5a5d4ed28959317dac1ff72bc16773ccd9b3fe79713fe27f3/SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd", size = 3253368, upload-time = "2025-01-10T00:56:31.416Z" },
+    { url = "https://files.pythonhosted.org/packages/67/f2/30f5012379031cd5389eb06455282f926a4f99258e5ee5ccdcea27f30d67/SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098", size = 3188655, upload-time = "2025-01-10T02:36:58.732Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/df/905499aa051605aeda62c1faf33d941ffb7fda291159ab1c24ef5207a079/SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb", size = 3215281, upload-time = "2025-01-10T00:56:35.9Z" },
+    { url = "https://files.pythonhosted.org/packages/94/54/f2769e7e356520f75016d82ca43ed85e47ba50e636a34124db4625ae5976/SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761", size = 2072972, upload-time = "2025-01-09T22:59:55.279Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/7f/241f059e0b7edb85845368f43964d6b0b41733c2f7fffaa993f8e66548a5/SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = "sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff", size = 2098597, upload-time = "2025-01-09T22:59:58.352Z" },
+    { url = "https://files.pythonhosted.org/packages/45/d1/e63e56ceab148e69f545703a74b90c8c6dc0a04a857e4e63a4c07a23cf91/SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658", size = 2097968, upload-time = "2025-01-10T00:36:47.779Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/e5/93ce63310347062bd42aaa8b6785615c78539787ef4380252fcf8e2dcee3/SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb", size = 2088445, upload-time = "2025-01-10T00:36:49.309Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/8c/d0e0081c09188dd26040fc8a09c7d87f539e1964df1ac60611b98ff2985a/SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4", size = 3174880, upload-time = "2025-01-10T02:37:01.904Z" },
+    { url = "https://files.pythonhosted.org/packages/79/f7/3396038d8d4ea92c72f636a007e2fac71faae0b59b7e21af46b635243d09/SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94", size = 3188226, upload-time = "2025-01-10T00:56:37.639Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/33/7a1d85716b29c86a744ed43690e243cb0e9c32e3b68a67a97eaa6b49ef66/SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0", size = 3121425, upload-time = "2025-01-10T02:37:04.014Z" },
+    { url = "https://files.pythonhosted.org/packages/27/11/fa63a77c88eb2f79bb8b438271fbacd66a546a438e4eaba32d62f11298e2/SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6", size = 3149589, upload-time = "2025-01-10T00:56:40.578Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/04/fcdd103b6871f2110460b8275d1c4828daa806997b0fa5a01c1cd7fd522d/SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2", size = 2070746, upload-time = "2025-01-09T23:00:00.985Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/7c/e024719205bdc1465b7b7d3d22ece8e1ad57bc7d76ef6ed78bb5f812634a/SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2", size = 2094612, upload-time = "2025-01-09T23:00:03.8Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/36/59cc97c365f2f79ac9f3f51446cae56dfd82c4f2dd98497e6be6de20fb91/SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1", size = 1894113, upload-time = "2025-01-10T00:44:58.368Z" },
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "asttokens" },
+    { name = "executing" },
+    { name = "pure-eval" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
+]
+
+[[package]]
+name = "tomli"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" },
+    { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" },
+    { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" },
+    { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" },
+    { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" },
+    { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" },
+    { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" },
+    { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" },
+    { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" },
+    { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" },
+    { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" },
+    { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" },
+    { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" },
+    { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" },
+    { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" },
+    { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" },
+    { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
+]
+
+[[package]]
+name = "types-python-dateutil"
+version = "2.9.0.20250822"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0c/0a/775f8551665992204c756be326f3575abba58c4a3a52eef9909ef4536428/types_python_dateutil-2.9.0.20250822.tar.gz", hash = "sha256:84c92c34bd8e68b117bff742bc00b692a1e8531262d4507b33afcc9f7716cd53", size = 16084, upload-time = "2025-08-22T03:02:00.613Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ab/d9/a29dfa84363e88b053bf85a8b7f212a04f0d7343a4d24933baa45c06e08b/types_python_dateutil-2.9.0.20250822-py3-none-any.whl", hash = "sha256:849d52b737e10a6dc6621d2bd7940ec7c65fcb69e6aa2882acf4e56b2b508ddc", size = 17892, upload-time = "2025-08-22T03:01:59.436Z" },
+]
+
+[[package]]
+name = "types-pytz"
+version = "2025.2.0.20250809"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876, upload-time = "2025-08-09T03:14:17.453Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = "sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095, upload-time = "2025-08-09T03:14:16.674Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.34.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "distlib" },
+    { name = "filelock" },
+    { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
+]
+
+[[package]]
+name = "win32-setctime"
+version = "1.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
+]