diff --git a/.github/workflows/docs-pr-check.yml b/.github/workflows/docs-pr-check.yml
index 7fbf8a62..c4e9da32 100644
--- a/.github/workflows/docs-pr-check.yml
+++ b/.github/workflows/docs-pr-check.yml
@@ -1,12 +1,21 @@
name: "Pull Request Docs Check"
-on:
-- pull_request
+
+on:
+ pull_request:
jobs:
docs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v1
- - uses: ammaraskar/sphinx-action@master
- with:
- docs-folder: "docs/"
+ - uses: actions/checkout@v4
+
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Set up uv
+ uses: astral-sh/setup-uv@v4
+
+ - name: Build docs
+ run: make docs
diff --git a/.gitignore b/.gitignore
index 3a093314..b566efbf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,9 @@ __pycache__/
# C extensions
*.so
+# dev stuff
+dev_files
+
# Distribution / packaging
.Python
env/
@@ -83,4 +86,4 @@ TODO.md
# Environment items
.env
-.envrc
\ No newline at end of file
+.envrc
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..e525175a
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,6 @@
+repos:
+ - repo: https://github.com/psf/black
+ rev: 24.2.0
+ hooks:
+ - id: black
+      files: gerrychain/
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index a76326b9..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,3 +0,0 @@
-include versioneer.py
-include gerrychain/_version.py
-include LICENSE
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..445c600a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,96 @@
+# Makefile for managing GerryChain development tasks using the 'uv' virtual environment manager.
+
+PYTHON_VERSION = 3.11
+VENV_DIR ?= .venv
+PKG ?= gerrychain
+TEST_PATHS ?= tests
+
+.PHONY: help setup check_prereq install install-docs test lint format precommit docs clean
+
+help:
+ @echo "Available targets:"
+ @echo " setup - Set up environment for full development including dev dependencies and pre-commit hooks"
+ @echo " install - Install the package"
+ @echo " install-docs - Install documentation dependencies"
+ @echo " test - Run the test suite"
+ @echo " lint - Run code linters"
+ @echo " format - Format the codebase"
+ @echo " precommit - Run pre-commit hooks"
+ @echo " docs - Build the documentation"
+ @echo " clean - Clean build artifacts"
+
+
+check_prereq:
+ @echo "Checking prerequisites..."
+ @if ! command -v uv > /dev/null 2>&1; then \
+ echo "Error: 'uv' is not installed. Please install it first using the following command:"; \
+ echo " curl -LsSf https://astral.sh/uv/install.sh | sh"; \
+ exit 1; \
+ fi
+ @echo "'uv' is installed."
+
+setup: check_prereq
+ @echo "Setting up the development environment for GerryChain..."
+ @echo
+ uv python install $(PYTHON_VERSION)
+ @echo "Creating virtual environment and installing dev dependencies..."
+ uv sync --python $(PYTHON_VERSION)
+ uv sync --all-groups
+ uv pip install -e .
+ uv run pre-commit install
+ @echo ""
+ @echo "Development environment setup complete!"
+
+install: check_prereq
+ @echo "Installing GerryChain package..."
+ uv sync --python $(PYTHON_VERSION)
+ uv pip install -e .
+
+install-docs: check_prereq
+ @echo "Installing GerryChain package with all just the documentation dependencies..."
+ uv sync --python $(PYTHON_VERSION)
+ uv sync --group docs
+ uv pip install -e .
+
+test:
+ @echo "Running test suite..."
+ uv run pytest -v $(TEST_PATHS)
+
+# Add this in later
+# type-check:
+# @echo "Running type checking with mypy..."
+# uv run mypy $(PKG) ${TEST_PATHS}
+
+format:
+ @echo "Formatting codebase with black..."
+ uv run isort $(PKG) $(TEST_PATHS)
+ uv run black $(PKG) $(TEST_PATHS)
+
+lint:
+ @echo "Running linters (ruff)..."
+ uv run ruff check $(PKG) $(TEST_PATHS)
+
+precommit:
+ @echo "Running pre-commit hooks..."
+ uv run pre-commit install
+ uv run pre-commit run --all-files
+
+docs: install-docs
+ @echo "Building documentation..."
+ uv run sphinx-build -b html docs/ docs/_build
+
+clean:
+ @echo "Cleaning build artifacts..."
+ @rm -rf build/ \
+ dist/ \
+ *.egg-info \
+ .pytest_cache/ \
+ .mypy_cache/ \
+ .ruff_cache/ \
+ docs/_build/ \
+ $(VENV_DIR) \
+ .vscode/ \
+ .ipynb_checkpoints/ \
+ docs/build/
+ @find . -type d -name "__pycache__" -exec rm -rf {} +
+ @echo "Clean complete."
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index cc2e6e11..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-SPHINXPROJ = GerryChain
-SOURCEDIR = .
-BUILDDIR = _build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
index 38cbd0c8..ed80ffc0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -81,7 +81,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = 'en'
+language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -120,7 +120,7 @@
#
# html_sidebars = {}
html_css_files = [
- 'css/custom.css',
+ "css/custom.css",
]
@@ -192,33 +192,6 @@
autodoc_default_flags = ["members"]
-# -- Mock C libraries --------------------------------------------------------
-
-# RTD is unable to install libraries with C dependencies.
-# We're using the mock module to mock those away.
-
-MOCK_MODULES = [
- "numpy",
- "pandas",
- "geopandas",
- "matplotlib",
- "matplotlib.pyplot",
- # "networkx",
- # "networkx.readwrite",
- # "networkx.algorithms",
- # "networkx.algorithms.shortest_paths",
- # "networkx.algorithms.shortest_paths.weighted",
- "shapely",
- "shapely.ops",
- "shapely.strtree",
- "shapely.prep",
- "shapely.prepared",
- "shapely.validation",
- "gerrychain.vendor.utm",
-]
-
-for module in MOCK_MODULES:
- sys.modules[module] = mock.Mock()
# -- Extension configuration -------------------------------------------------
diff --git a/docs/geo_settings.txt b/docs/geo_settings.txt
deleted file mode 100644
index 9dce397a..00000000
--- a/docs/geo_settings.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-fiona==1.9.5
-shapely==2.0.2
-pyproj==3.6.1
-geopandas==0.8.1
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index 826d1585..df28b786 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -37,7 +37,11 @@ repository, where `bug reports and feature requests`_, as well as
.. _`contributions`: https://github.com/mggg/gerrychain/pulls
-.. include:: user/install.rst
+.. include:: user/install_header.rst
+
+For more detailed installation instructions, including instructions for
+setting up virtual environments, please see the following section:
+:doc:`user/install`.
.. toctree::
:caption: User Guide
@@ -82,4 +86,4 @@ MGGG for the 2019 MIT IAP course `Computational Approaches for Political Redistr
:caption: Index
:maxdepth: 4
- full_ref
\ No newline at end of file
+ full_ref
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index 48d02baf..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,36 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=.
-set BUILDDIR=_build
-set SPHINXPROJ=GerryChain
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.http://sphinx-doc.org/
- exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
-
-:end
-popd
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index aef5653d..00000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-recommonmark==0.7.1
-sphinx-rtd-theme==1.0.0
-sphinx-copybutton==0.5.2
-networkx==2.7.1
-geopandas==0.8.2
-matplotlib==3.5.1
-scipy==1.10.0
-tqdm==4.66.4
diff --git a/docs/user/install.rst b/docs/user/install.rst
index 76bbcb75..155130cc 100644
--- a/docs/user/install.rst
+++ b/docs/user/install.rst
@@ -1,20 +1,4 @@
-Installation
-============
-
-Supported Python Versions
--------------------------
-
-The most recent version of GerryChain (as of April 2024) supports
-
-- Python 3.9
-- Python 3.10
-- Python 3.11
-- Python 3.12
-
-If you do not have one of these versions installed on you machine, we
-recommend that you go to the
-`Python website <https://www.python.org/>`_ and
-download the installer for one of these versions. [1]_
+.. include:: ./install_header.rst
.. admonition:: A Note For Windows Users
:class: note
@@ -130,17 +114,6 @@ ready to install GerryChain.
To install GerryChain from PyPI_, run ``pip install gerrychain`` from
the command line.
-If you plan on using GerryChain's GIS functions, such as computing
-adjacencies or reading in shapefiles, then run
-``pip install gerrychain[geo]`` from the command line.
-
-This approach sometimes fails due to compatibility issues between our
-different Python GIS dependencies, like ``geopandas``, ``pyproj``,
-``fiona``, and ``shapely``. If you run into this issue, try installing
-the dependencies using the
-`geo_settings.txt `_
-file. To do this, run ``pip install -r geo_settings.txt`` from the
-command line.
.. note::
@@ -151,10 +124,6 @@ command line.
line.
.. _PyPI: https://pypi.org/
-.. [1] Of course, if you are using a Linux system, you will either need to use your
- system's package manager or install from source. You may also find luck installing
- Python directly from the package manager if you find installing from source to be
- troublesome.
.. include:: ../repeated_subsections/reproducible_envs.rst
diff --git a/docs/user/install_header.rst b/docs/user/install_header.rst
new file mode 100644
index 00000000..23ee17af
--- /dev/null
+++ b/docs/user/install_header.rst
@@ -0,0 +1,23 @@
+Installation
+============
+
+Supported Python Versions
+-------------------------
+
+The most recent version of GerryChain (as of April 2024) supports
+
+- Python 3.11
+- Python 3.12
+- Python 3.13
+
+If you do not have one of these versions installed on your machine, we
+recommend that you go to the
+`Python website <https://www.python.org/>`_ and
+download the installer for one of these versions.
+
+Most users can install GerryChain using pip:
+
+.. code:: console
+
+ pip install gerrychain
+
diff --git a/gerrychain/__init__.py b/gerrychain/__init__.py
index f3e66f99..ed984fa5 100644
--- a/gerrychain/__init__.py
+++ b/gerrychain/__init__.py
@@ -1,7 +1,6 @@
-from modulefinder import Module
import warnings
+from modulefinder import Module
-from ._version import get_versions
from .chain import MarkovChain
from .graph import Graph
from .partition import GeographicPartition, Partition
@@ -11,22 +10,10 @@
# It might be good to see how often this happens
warnings.simplefilter("once")
-try:
- import geopandas
-
- # warn about https://github.com/geopandas/geopandas/issues/2199
- if geopandas.options.use_pygeos:
- warnings.warn(
- "GerryChain cannot use GeoPandas when PyGeos is enabled. Disable or "
- "uninstall PyGeos. You can disable PyGeos in GeoPandas by setting "
- "`geopandas.options.use_pygeos = False` before importing your shapefile."
- )
-except ModuleNotFoundError:
- pass
-
-__version__ = get_versions()["version"]
-del get_versions
-
-from . import _version
-
-__version__ = _version.get_versions()["version"]
+__all__ = [
+ "Graph",
+ "Partition",
+ "GeographicPartition",
+ "MarkovChain",
+ "Election",
+]
diff --git a/gerrychain/_version.py b/gerrychain/_version.py
deleted file mode 100644
index af21f27b..00000000
--- a/gerrychain/_version.py
+++ /dev/null
@@ -1,716 +0,0 @@
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain.
-# Generated by versioneer-0.29
-# https://github.com/python-versioneer/python-versioneer
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple
-import functools
-
-
-def get_keywords() -> Dict[str, str]:
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "$Format:%d$"
- git_full = "$Format:%H$"
- git_date = "$Format:%ci$"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
- VCS: str
- style: str
- tag_prefix: str
- parentdir_prefix: str
- versionfile_source: str
- verbose: bool
-
-
-def get_config() -> VersioneerConfig:
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = ""
- cfg.tag_prefix = "'v'"
- cfg.parentdir_prefix = "gerrychain-"
- cfg.versionfile_source = "gerrychain/_version.py"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
-
-
-def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
- """Create decorator to mark a method as the handler of a VCS."""
-
- def decorate(f: Callable) -> Callable:
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
-
- return decorate
-
-
-def run_command(
- commands: List[str],
- args: List[str],
- cwd: Optional[str] = None,
- verbose: bool = False,
- hide_stderr: bool = False,
- env: Optional[Dict[str, str]] = None,
-) -> Tuple[Optional[str], Optional[int]]:
- """Call the given command(s)."""
- assert isinstance(commands, list)
- process = None
-
- popen_kwargs: Dict[str, Any] = {}
- if sys.platform == "win32":
- # This hides the console window if pythonw.exe is used
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- popen_kwargs["startupinfo"] = startupinfo
-
- for command in commands:
- try:
- dispcmd = str([command] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- process = subprocess.Popen(
- [command] + args,
- cwd=cwd,
- env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr else None),
- **popen_kwargs,
- )
- break
- except OSError as e:
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = process.communicate()[0].strip().decode()
- if process.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, process.returncode
- return stdout, process.returncode
-
-
-def versions_from_parentdir(
- parentdir_prefix: str,
- root: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for _ in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {
- "version": dirname[len(parentdir_prefix) :],
- "full-revisionid": None,
- "dirty": False,
- "error": None,
- "date": None,
- }
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print(
- "Tried directories %s but none started with prefix %s"
- % (str(rootdirs), parentdir_prefix)
- )
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords: Dict[str, str] = {}
- try:
- with open(versionfile_abs, "r") as fobj:
- for line in fobj:
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- except OSError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(
- keywords: Dict[str, str],
- tag_prefix: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Get version information from git keywords."""
- if "refnames" not in keywords:
- raise NotThisMethod("Short version file found")
- date = keywords.get("date")
- if date is not None:
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
-
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = {r.strip() for r in refnames.strip("()").split(",")}
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = {r for r in refs if re.search(r"\d", r)}
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix) :]
- # Filter out refs that exactly match prefix or that don't start
- # with a number once the prefix is stripped (mostly a concern
- # when prefix is '')
- if not re.match(r"\d", r):
- continue
- if verbose:
- print("picking %s" % r)
- return {
- "version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": None,
- "date": date,
- }
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {
- "version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False,
- "error": "no suitable tags",
- "date": None,
- }
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(
- tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command
-) -> Dict[str, Any]:
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- # GIT_DIR can interfere with correct operation of Versioneer.
- # It may be intended to be passed to the Versioneer-versioned project,
- # but that should not change where we get our version from.
- env = os.environ.copy()
- env.pop("GIT_DIR", None)
- runner = functools.partial(runner, env=env)
-
- _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = runner(
- GITS,
- [
- "describe",
- "--tags",
- "--dirty",
- "--always",
- "--long",
- "--match",
- f"{tag_prefix}[[:digit:]]*",
- ],
- cwd=root,
- )
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces: Dict[str, Any] = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
- # --abbrev-ref was added in git-1.6.3
- if rc != 0 or branch_name is None:
- raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
- branch_name = branch_name.strip()
-
- if branch_name == "HEAD":
- # If we aren't exactly on a branch, pick a branch which represents
- # the current commit. If all else fails, we are on a branchless
- # commit.
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
- # --contains was added in git-1.5.4
- if rc != 0 or branches is None:
- raise NotThisMethod("'git branch --contains' returned error")
- branches = branches.split("\n")
-
- # Remove the first line if we're running detached
- if "(" in branches[0]:
- branches.pop(0)
-
- # Strip off the leading "* " from the list of branches.
- branches = [branch[2:] for branch in branches]
- if "master" in branches:
- branch_name = "master"
- elif not branches:
- branch_name = None
- else:
- # Pick the first branch that is returned. Good or bad.
- branch_name = branches[0]
-
- pieces["branch"] = branch_name
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[: git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
- if not mo:
- # unparsable. Maybe git-describe is misbehaving?
- pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
- full_tag,
- tag_prefix,
- )
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix) :]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
- pieces["distance"] = len(out.split()) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces: Dict[str, Any]) -> str:
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces: Dict[str, Any]) -> str:
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_branch(pieces: Dict[str, Any]) -> str:
- """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
-
- The ".dev0" means not master branch. Note that .dev0 sorts backwards
- (a feature branch will appear "older" than the master branch).
-
- Exceptions:
- 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0"
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
- """Split pep440 version string at the post-release segment.
-
- Returns the release segments before the post-release and the
- post-release version number (or -1 if no post-release segment is present).
- """
- vc = str.split(ver, ".post")
- return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
-
-
-def render_pep440_pre(pieces: Dict[str, Any]) -> str:
- """TAG[.postN.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post0.devDISTANCE
- """
- if pieces["closest-tag"]:
- if pieces["distance"]:
- # update the post release segment
- tag_version, post_version = pep440_split_post(pieces["closest-tag"])
- rendered = tag_version
- if post_version is not None:
- rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
- else:
- rendered += ".post0.dev%d" % (pieces["distance"])
- else:
- # no commits, use the tag as the version
- rendered = pieces["closest-tag"]
- else:
- # exception #1
- rendered = "0.post0.dev%d" % pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyways.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
-
-
-def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
-
- The ".dev0" means not master branch.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_old(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces: Dict[str, Any]) -> str:
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces: Dict[str, Any]) -> str:
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always -long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {
- "version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None,
- }
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-branch":
- rendered = render_pep440_branch(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-post-branch":
- rendered = render_pep440_post_branch(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {
- "version": rendered,
- "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"],
- "error": None,
- "date": pieces.get("date"),
- }
-
-
-def get_versions() -> Dict[str, Any]:
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for _ in cfg.versionfile_source.split("/"):
- root = os.path.dirname(root)
- except NameError:
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None,
- }
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {
- "version": "0+unknown",
- "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version",
- "date": None,
- }
diff --git a/gerrychain/accept.py b/gerrychain/accept.py
index 92d3b5c8..2f2143a3 100644
--- a/gerrychain/accept.py
+++ b/gerrychain/accept.py
@@ -9,6 +9,7 @@
"""
import random
+
from gerrychain.partition import Partition
@@ -21,6 +22,8 @@ def cut_edge_accept(partition: Partition) -> bool:
Always accepts the flip if the number of cut_edges increases.
Otherwise, uses the Metropolis criterion to decide.
+ frm: TODO: Documentation: Add documentation on what the "Metropolis criterion" is...
+
:param partition: The current partition to accept a flip from.
:type partition: Partition
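Regarding the TODO above: a minimal sketch of the Metropolis-style acceptance rule the docstring refers to. This is an illustrative standalone example, not gerrychain's `cut_edge_accept` implementation; the name `metropolis_accept`, the `beta` parameter, and the lower-score-is-better convention are assumptions.

```python
import math
import random


def metropolis_accept(old_score: float, new_score: float, beta: float = 1.0) -> bool:
    """Metropolis criterion: always accept an improvement; accept a worse
    proposal with probability exp(-beta * (new_score - old_score))."""
    if new_score <= old_score:
        return True
    return random.random() < math.exp(-beta * (new_score - old_score))


# With beta = 1.0, a proposal that is 0.5 worse is accepted ~61% of the time.
```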
diff --git a/gerrychain/chain.py b/gerrychain/chain.py
index 9b63f8a7..4659275b 100644
--- a/gerrychain/chain.py
+++ b/gerrychain/chain.py
@@ -24,9 +24,9 @@
Last Updated: 11 Jan 2024
"""
-from typing import Union, Iterable, Callable, Optional
+from typing import Callable, Iterable, Optional, Union
-from gerrychain.constraints import Validator, Bounds
+from gerrychain.constraints import Bounds, Validator
from gerrychain.partition import Partition
diff --git a/gerrychain/constraints/__init__.py b/gerrychain/constraints/__init__.py
index 58d75472..a706aaa1 100644
--- a/gerrychain/constraints/__init__.py
+++ b/gerrychain/constraints/__init__.py
@@ -37,12 +37,12 @@
"""
from .bounds import (
+ Bounds,
LowerBound,
SelfConfiguringLowerBound,
SelfConfiguringUpperBound,
UpperBound,
WithinPercentRangeOfBounds,
- Bounds,
)
from .compactness import (
L1_polsby_popper,
diff --git a/gerrychain/constraints/bounds.py b/gerrychain/constraints/bounds.py
index 88dd8a68..b27f71f4 100644
--- a/gerrychain/constraints/bounds.py
+++ b/gerrychain/constraints/bounds.py
@@ -1,4 +1,5 @@
from typing import Callable, Tuple
+
from ..partition import Partition
diff --git a/gerrychain/constraints/compactness.py b/gerrychain/constraints/compactness.py
index f32dff7f..2f1d6933 100644
--- a/gerrychain/constraints/compactness.py
+++ b/gerrychain/constraints/compactness.py
@@ -1,7 +1,6 @@
-from ..partition import Partition
import math
-
+from ..partition import Partition
from .bounds import SelfConfiguringLowerBound, SelfConfiguringUpperBound
@@ -58,13 +57,9 @@ def L_minus_1_polsby_popper(partition):
:returns: :math:`L^{-1}` norm of the Polsby-Popper scores
:rtype: float
"""
- return len(partition.parts) / sum(
- 1 / value for value in partition["polsby_popper"].values()
- )
+ return len(partition.parts) / sum(1 / value for value in partition["polsby_popper"].values())
no_worse_L_minus_1_polsby_popper = SelfConfiguringLowerBound(L_minus_1_polsby_popper)
-no_worse_L1_reciprocal_polsby_popper = SelfConfiguringUpperBound(
- L1_reciprocal_polsby_popper
-)
+no_worse_L1_reciprocal_polsby_popper = SelfConfiguringUpperBound(L1_reciprocal_polsby_popper)
diff --git a/gerrychain/constraints/contiguity.py b/gerrychain/constraints/contiguity.py
index e1077e4a..9f9cb8f2 100644
--- a/gerrychain/constraints/contiguity.py
+++ b/gerrychain/constraints/contiguity.py
@@ -1,66 +1,125 @@
+import random
from heapq import heappop, heappush
from itertools import count
+from typing import Any, Callable, Dict, Set
-import networkx as nx
-from typing import Callable, Any, Dict, Set
+from ..graph import Graph
from ..partition import Partition
-import random
from .bounds import SelfConfiguringLowerBound
-
-def are_reachable(G: nx.Graph, source: Any, avoid: Callable, targets: Any) -> bool:
+# frm: TODO: Performance: Think about the efficiency of the routines in this module. Almost all
+# of these involve traversing the entire graph, and I fear that callers
+# might make multiple calls.
+#
+# Possible solutions are to 1) speed up these routines somehow and 2) cache
+# results so that at least we don't do the traversals over and over.
+
+# frm: TODO: Refactoring: Rethink WTF this module is all about.
+#
+# It seems like a grab bag for lots of different things - used in different places.
+#
+# What got me to write this comment was looking at the signature for def contiguous()
+# which operates on a partition, but lots of other routines here operate on graphs or
+# other things. So, what is going on?
+#
+# Peter replied to this comment in a pull request:
+#
+# So anything that is prefixed with an underscore in here should be a helper
+# function and not a part of the public API. It looks like, other than
+# is_connected_bfs (which should probably be marked "private" with an
+# underscore) everything here is acting like an updater.
+#
+
+
+def _are_reachable(graph: Graph, start_node: Any, avoid: Callable, targets: Any) -> bool:
"""
A modified version of NetworkX's function
`networkx.algorithms.shortest_paths.weighted._dijkstra_multisource()`
- This function checks if the targets are reachable from the source node
+    This function checks if the targets are reachable from the start_node
while avoiding edges based on the avoid condition function.
- :param G: The networkx graph
- :type G: nx.Graph
- :param source: The starting node
- :type source: int
+    :param graph: The graph to traverse
+ :type graph: Graph
+ :param start_node: The starting node
+ :type start_node: int
:param avoid: The function that determines if an edge should be avoided.
It should take in three parameters: the start node, the end node, and
the edges to avoid. It should return True if the edge should be avoided,
False otherwise.
+ # frm: TODO: Documentation: Fix the comment above about the "avoid" function parameter.
+ # It may have once been accurate, but the original code below
+ # passed parameters to it of (node_id, neighbor_node_id, edge_data_dict)
+ # from NetworkX.Graph._succ So, "the edges to avoid" above is wrong.
+ # This whole issue is moot, however, since the only routine
+ # that is used as an avoid function ignores the third parameter.
+ # Or rather it used to avoid the third parameter, but it has
+ # been updated to only take two parameters, and the code below
+ # has been modified to use Graph.neighbors() instead of _succ
+ # because 1) we can't use NX and 2) because we don't need the
+ # edge data dictionary anyways...
+ #
:type avoid: Callable
:param targets: The target nodes that we would like to reach
:type targets: Any
- :returns: True if all of the targets are reachable from the source node
+    :returns: True if all of the targets are reachable from the start_node
under the avoid condition, False otherwise.
:rtype: bool
"""
- G_succ = G._succ if G.is_directed() else G._adj
-
push = heappush
pop = heappop
- dist = {} # dictionary of final distances
+ node_distances = {} # dictionary of final distances
seen = {}
# fringe is heapq with 3-tuples (distance,c,node)
# use the count c to avoid comparing nodes (may not be able to)
c = count()
fringe = []
- seen[source] = 0
- push(fringe, (0, next(c), source))
-
- while not all(t in seen for t in targets) and fringe:
- (d, _, v) = pop(fringe)
- if v in dist:
+ seen[start_node] = 0
+ push(fringe, (0, next(c), start_node))
+
+ # frm: Original Code:
+ #
+ # while not all(t in seen for t in targets) and fringe:
+ # (d, _, v) = pop(fringe)
+ # if v in dist:
+ # continue # already searched this node.
+ # dist[v] = d
+ # for u, e in G_succ[v].items():
+ # if avoid(v, u, e):
+ # continue
+ #
+ # vu_dist = dist[v] + 1
+ # if u not in seen or vu_dist < seen[u]:
+ # seen[u] = vu_dist
+ # push(fringe, (vu_dist, next(c), u))
+ #
+ # return all(t in seen for t in targets)
+ #
+
+ # While we have not yet seen all of our targets and while there is
+ # still some fringe...
+ while not all(tgt in seen for tgt in targets) and fringe:
+ (distance, _, node_id) = pop(fringe)
+ if node_id in node_distances:
continue # already searched this node.
- dist[v] = d
- for u, e in G_succ[v].items():
- if avoid(v, u, e):
+ node_distances[node_id] = distance
+
+ for neighbor in graph.neighbors(node_id):
+ if avoid(node_id, neighbor):
continue
- vu_dist = dist[v] + 1
- if u not in seen or vu_dist < seen[u]:
- seen[u] = vu_dist
- push(fringe, (vu_dist, next(c), u))
+ neighbor_distance = node_distances[node_id] + 1
+ if neighbor not in seen or neighbor_distance < seen[neighbor]:
+ seen[neighbor] = neighbor_distance
+ push(fringe, (neighbor_distance, next(c), neighbor))
- return all(t in seen for t in targets)
+ # frm: TODO: Refactoring: Simplify this code. It computes distances and counts but
+ # never uses them. These must be relics of code copied
+ # from somewhere else where it had more uses...
+
+ return all(tgt in seen for tgt in targets)
def single_flip_contiguous(partition: Partition) -> bool:
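For reference alongside the rewritten traversal above, here is a compact BFS sketch of the same reach-with-avoid idea on a plain adjacency dict. As the TODO notes, the distances in `_are_reachable` are never used, so plain BFS suffices; `reachable_avoiding`, `district`, and `crosses` are illustrative names, not part of this module.

```python
from collections import deque


def reachable_avoiding(adj, start, targets, avoid):
    """BFS variant of the traversal above on a plain adjacency dict.
    avoid(u, v) returning True prunes the edge u-v, mirroring the
    two-argument avoid callable used by _are_reachable()."""
    seen = {start}
    queue = deque([start])
    while queue and not all(t in seen for t in targets):
        u = queue.popleft()
        for v in adj[u]:
            if avoid(u, v) or v in seen:
                continue
            seen.add(v)
            queue.append(v)
    return all(t in seen for t in targets)


# Nodes 0-3 in a path; "avoid" blocks edges that cross district lines.
district = {0: "A", 1: "A", 2: "B", 3: "A"}
adj = {0: [1], 1: [0, 2], 2: [1, 3], 3: [2]}
crosses = lambda u, v: district[u] != district[v]
assert reachable_avoiding(adj, 0, [1], crosses)      # stays inside "A"
assert not reachable_avoiding(adj, 0, [3], crosses)  # blocked at the A/B edge
```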
@@ -87,7 +146,7 @@ def single_flip_contiguous(partition: Partition) -> bool:
graph = partition.graph
assignment = partition.assignment
- def partition_edge_avoid(start_node: Any, end_node: Any, edge_attrs: Dict):
+ def _partition_edge_avoid(start_node: Any, end_node: Any):
"""
Helper function used in the graph traversal to avoid edges that cross between different
assignments. It's crucial for ensuring that the traversal only considers paths within
@@ -98,7 +157,7 @@ def partition_edge_avoid(start_node: Any, end_node: Any, edge_attrs: Dict):
:param end_node: The end node of the edge.
:type end_node: Any
-        :param edge_attrs: The attributes of the edge (not used in this function). Needed
-            because this function is passed to :func:`are_reachable`, which expects the
-            avoid function to have this signature.
-        :type edge_attrs: Dict
@@ -126,9 +185,9 @@ def partition_edge_avoid(start_node: Any, end_node: Any, edge_attrs: Dict):
start_neighbor = random.choice(old_neighbors)
# Check if all old neighbors in the same assignment are still reachable.
- connected = are_reachable(
- graph, start_neighbor, partition_edge_avoid, old_neighbors
- )
+ # The "_partition_edge_avoid" function will prevent searching across
+ # a part (district) boundary
+ connected = _are_reachable(graph, start_neighbor, _partition_edge_avoid, old_neighbors)
if not connected:
return False
@@ -138,7 +197,7 @@ def partition_edge_avoid(start_node: Any, end_node: Any, edge_attrs: Dict):
return True
-def affected_parts(partition: Partition) -> Set[int]:
+def _affected_parts(partition: Partition) -> Set[int]:
"""
Checks which partitions were affected by the change of nodes.
@@ -168,7 +227,7 @@ def affected_parts(partition: Partition) -> Set[int]:
def contiguous(partition: Partition) -> bool:
"""
- Check if the parts of a partition are connected using :func:`networkx.is_connected`.
+ Check if the parts of a partition are connected
:param partition: The proposed next :class:`~gerrychain.partition.Partition`
:type partition: Partition
@@ -176,9 +235,8 @@ def contiguous(partition: Partition) -> bool:
:returns: Whether the partition is contiguous
:rtype: bool
"""
- return all(
- nx.is_connected(partition.subgraphs[part]) for part in affected_parts(partition)
- )
+
+ return all(is_connected_bfs(partition.subgraphs[part]) for part in _affected_parts(partition))
def contiguous_bfs(partition: Partition) -> bool:
@@ -192,16 +250,36 @@ def contiguous_bfs(partition: Partition) -> bool:
:returns: Whether the parts of this partition are connected
:rtype: bool
"""
- parts_to_check = affected_parts(partition)
-
- # Generates a subgraph for each district and perform a BFS on it
- # to check connectedness.
- for part in parts_to_check:
- adj = nx.to_dict_of_lists(partition.subgraphs[part])
- if _bfs(adj) is False:
- return False
- return True
+ # frm: TODO: Refactoring: Figure out why this routine, contiguous_bfs() exists.
+ #
+ # It is mentioned in __init__.py so maybe it is used externally in legacy code.
+ #
+ # However, I have changed the code so that it just calls contiguous() and all
+ # of the tests pass, so I am going to assume that my comment below is accurate,
+ # that is, I am assuming that this function does not need to exist independently
+ # except for legacy purposes. Stated differently, if someone can verify that
+ # this routine is NOT needed for legacy purposes, then we can just delete it.
+ #
+ # It seems to be exactly the same conceptually as contiguous(). It looks
+ # at the "affected" parts - those that have changed node
+ # assignments from parent, and sees if those parts are
+ # contiguous.
+ #
+ # frm: Original Code:
+ #
+ # parts_to_check = _affected_parts(partition)
+ #
+ # # Generates a subgraph for each district and perform a BFS on it
+ # # to check connectedness.
+ # for part in parts_to_check:
+ # adj = nx.to_dict_of_lists(partition.subgraphs[part])
+ # if _bfs(adj) is False:
+ # return False
+ #
+ # return True
+
+ return contiguous(partition)
def number_of_contiguous_parts(partition: Partition) -> int:
@@ -213,7 +291,7 @@ def number_of_contiguous_parts(partition: Partition) -> int:
:rtype: int
"""
parts = partition.assignment.parts
- return sum(1 for part in parts if nx.is_connected(partition.subgraphs[part]))
+ return sum(1 for part in parts if is_connected_bfs(partition.subgraphs[part]))
# Create an instance of SelfConfiguringLowerBound using the number_of_contiguous_parts function.
@@ -235,10 +313,31 @@ def contiguous_components(partition: Partition) -> Dict[int, list]:
subgraphs of that part of the partition
:rtype: dict
"""
- return {
- part: [subgraph.subgraph(nodes) for nodes in nx.connected_components(subgraph)]
- for part, subgraph in partition.subgraphs.items()
- }
+
+ # frm: TODO: Documentation: Migration Guide: NX vs RX Issues here:
+ #
+ # The call on subgraph() below is perhaps problematic because it will renumber
+ # node_ids...
+ #
+ # The issue is not that the code is incorrect (with RX there is really no other
+ # option), but rather that any legacy code will be unprepared to deal with the fact
+ # that the subgraphs returned are (I think) three node translations away from the
+ # original NX-Graph object's node_ids.
+ #
+ # Translations:
+ #
+ # 1) From NX to RX when partition was created
+ # 2) From top-level RX graph to the partition's subgraphs for each part (district)
+ # 3) From each part's subgraph to the subgraphs of contiguous_components...
+ #
+
+ connected_components_in_each_partition = {}
+ for part, subgraph in partition.subgraphs.items():
+ # create a subgraph for each set of connected nodes in the part's nodes
+ list_of_connected_subgraphs = subgraph.subgraphs_for_connected_components()
+ connected_components_in_each_partition[part] = list_of_connected_subgraphs
+
+ return connected_components_in_each_partition
def _bfs(graph: Dict[int, list]) -> bool:
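The node-id renumbering that the comment above warns about is easy to reproduce with raw rustworkx (a small illustrative snippet, independent of the GerryChain wrapper; component ordering in the printout may vary):

```python
import rustworkx

g = rustworkx.PyGraph()
g.add_nodes_from(["a", "b", "c", "d"])        # payloads get indices 0..3
g.add_edges_from_no_data([(0, 1), (2, 3)])

print(rustworkx.connected_components(g))      # e.g. [{0, 1}, {2, 3}]

sub = g.subgraph([2, 3])                      # take the "c"/"d" component
print(list(sub.node_indices()))               # [0, 1]  <- renumbered
print([sub[i] for i in sub.node_indices()])   # ['c', 'd'] payloads survive
```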
@@ -254,11 +353,11 @@ def _bfs(graph: Dict[int, list]) -> bool:
"""
q = [next(iter(graph))]
visited = set()
- total_vertices = len(graph)
+ num_nodes = len(graph)
# Check if the district has a single vertex. If it does, then simply return
# `True`, as it's trivially connected.
- if total_vertices <= 1:
+ if num_nodes <= 1:
return True
# bfs!
@@ -271,4 +370,31 @@ def _bfs(graph: Dict[int, list]) -> bool:
visited.add(neighbor)
q += [neighbor]
- return total_vertices == len(visited)
+ return num_nodes == len(visited)
+
+
+# frm: TODO: Testing: Verify that is_connected_bfs() works - add a test or two...
+
+# frm: TODO: Refactoring: Move this code into graph.py. It is all about the Graph...
+
+
+# frm: TODO: Documentation: This code was obtained from the web - probably could be optimized...
+# This code replaced calls on nx.is_connected()
+def is_connected_bfs(graph: Graph):
+ if not graph:
+ return True
+
+ nodes = list(graph.node_indices)
+
+ start_node = random.choice(nodes)
+ visited = {start_node}
+ queue = [start_node]
+
+ while queue:
+ current_node = queue.pop(0)
+ for neighbor in graph.neighbors(current_node):
+ if neighbor not in visited:
+ visited.add(neighbor)
+ queue.append(neighbor)
+
+ return len(visited) == len(nodes)
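On the TODOs above: a possible tightening of `is_connected_bfs`, assuming only the `node_indices` and `neighbors` API already used in this file. `deque.popleft()` is O(1) where `list.pop(0)` is O(n), any start node works for a connectivity check, and testing the node list avoids relying on the wrapper's truthiness. A sketch, not a drop-in replacement:

```python
from collections import deque


def is_connected_bfs_sketch(graph) -> bool:
    """Connectivity check via BFS from an arbitrary start node."""
    nodes = list(graph.node_indices)
    if not nodes:  # empty graph: trivially connected
        return True
    visited = {nodes[0]}
    queue = deque([nodes[0]])
    while queue:
        current = queue.popleft()  # O(1), unlike list.pop(0)
        for neighbor in graph.neighbors(current):
            if neighbor not in visited:
                visited.add(neighbor)
                queue.append(neighbor)
    return len(visited) == len(nodes)
```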
diff --git a/gerrychain/constraints/validity.py b/gerrychain/constraints/validity.py
index c93d74f4..0ef8ac48 100644
--- a/gerrychain/constraints/validity.py
+++ b/gerrychain/constraints/validity.py
@@ -1,8 +1,10 @@
-from ..updaters import CountySplit
-from .bounds import Bounds
+from typing import Callable, Dict, List
+
import numpy
-from typing import Callable, List, Dict
+
from ..partition import Partition
+from ..updaters import CountySplit
+from .bounds import Bounds
class Validator:
@@ -48,9 +50,7 @@ def __call__(self, partition: Partition) -> bool:
elif is_valid is True:
pass
else:
- raise TypeError(
- "Constraint {} returned a non-boolean.".format(repr(constraint))
- )
+ raise TypeError("Constraint {} returned a non-boolean.".format(repr(constraint)))
# all constraints are satisfied
return True
@@ -93,9 +93,7 @@ def population(partition):
return Bounds(population, bounds=bounds)
-def deviation_from_ideal(
- partition: Partition, attribute: str = "population"
-) -> Dict[int, float]:
+def deviation_from_ideal(partition: Partition, attribute: str = "population") -> Dict[int, float]:
"""
Computes the deviation of the given ``attribute`` from exact equality
among parts of the partition. Usually ``attribute`` is the population, and
@@ -117,9 +115,7 @@ def deviation_from_ideal(
total = sum(partition[attribute].values())
ideal = total / number_of_districts
- return {
- part: (value - ideal) / ideal for part, value in partition[attribute].items()
- }
+ return {part: (value - ideal) / ideal for part, value in partition[attribute].items()}
def districts_within_tolerance(
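A quick numeric check of the deviation formula above (illustrative values only):

```python
populations = {"A": 90, "B": 110}                     # two districts
ideal = sum(populations.values()) / len(populations)  # 100.0
deviation = {p: (v - ideal) / ideal for p, v in populations.items()}
assert deviation == {"A": -0.1, "B": 0.1}             # -10% and +10%
```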
diff --git a/gerrychain/graph/__init__.py b/gerrychain/graph/__init__.py
index ba52fa59..fc366d83 100644
--- a/gerrychain/graph/__init__.py
+++ b/gerrychain/graph/__init__.py
@@ -1,7 +1,7 @@
"""
This module provides a :class:`~gerrychain.graph.Graph` class that
extends the :class:`networkx.Graph` and includes some useful methods
-for working with graphs representing geographic data. The class
+for working with graphs representing geographic data. The class
:class:`~gerrychain.graph.Graph` is the only part of this module that
is intended to be used directly by users of GerryChain.
diff --git a/gerrychain/graph/adjacency.py b/gerrychain/graph/adjacency.py
index 3fa6c284..f5848501 100644
--- a/gerrychain/graph/adjacency.py
+++ b/gerrychain/graph/adjacency.py
@@ -10,9 +10,10 @@
"""
import warnings
-from geopandas import GeoDataFrame
from typing import Dict
+from geopandas import GeoDataFrame
+
def neighbors(df: GeoDataFrame, adjacency: str) -> Dict:
if adjacency not in ("rook", "queen"):
@@ -81,9 +82,7 @@ def intersections_with_neighbors(geometries):
:rtype: Generator
"""
for i, neighbors in neighboring_geometries(geometries):
- intersections = {
- j: geometries[i].intersection(geometries[j]) for j in neighbors
- }
+ intersections = {j: geometries[i].intersection(geometries[j]) for j in neighbors}
yield (i, intersections)
@@ -110,9 +109,7 @@ def warn_for_overlaps(intersection_pairs):
yield (i, intersections)
if len(overlaps) > 0:
warnings.warn(
- "Found overlaps among the given polygons. Indices of overlaps: {}".format(
- overlaps
- )
+ "Found overlaps among the given polygons. Indices of overlaps: {}".format(overlaps)
)
diff --git a/gerrychain/graph/geo.py b/gerrychain/graph/geo.py
index 0fc01997..b2acf719 100644
--- a/gerrychain/graph/geo.py
+++ b/gerrychain/graph/geo.py
@@ -6,11 +6,12 @@
"""
from collections import Counter
-from gerrychain.vendor.utm import from_latlon
# from shapely.geometry.base import BaseGeometry
from geopandas import GeoDataFrame
+from gerrychain.vendor.utm import from_latlon
+
def utm_of_point(point):
"""
@@ -84,9 +85,7 @@ def reprojected(df):
"""
utm = identify_utm_zone(df)
return df.to_crs(
- "+proj=utm +zone={utm} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(
- utm=utm
- )
+ "+proj=utm +zone={utm} +ellps=WGS84 +datum=WGS84 +units=m +no_defs".format(utm=utm)
)
diff --git a/gerrychain/graph/graph.py b/gerrychain/graph/graph.py
index fdd905a8..2b439632 100644
--- a/gerrychain/graph/graph.py
+++ b/gerrychain/graph/graph.py
@@ -9,23 +9,35 @@
Note:
This module relies on NetworkX, pandas, and geopandas, which should be installed and
imported as required.
+
+TODO: Documentation: Update top-level documentation for graph.py
"""
import functools
import json
-from typing import Any
import warnings
+# frm: codereview note: removed type hints that are now baked into Python
+from typing import Any, Generator, Iterable, Optional, Union
+
+import geopandas as gp
import networkx
-from networkx.classes.function import frozen
-from networkx.readwrite import json_graph
+import numpy
import pandas as pd
+import rustworkx
+import scipy
+from networkx.readwrite import json_graph
+from shapely.ops import unary_union
+from shapely.prepared import prep
from .adjacency import neighbors
from .geo import GeometryError, invalid_geometries, reprojected
-from typing import List, Iterable, Optional, Set, Tuple, Union
+# frm: TODO: Refactor: Move json_serialize() closer to its use.
+#
+# It should not be the first thing someone sees when looking at this code...
+#
def json_serialize(input_object: Any) -> Optional[int]:
"""
This function is used to handle one of the common issues that
@@ -48,74 +60,604 @@ def json_serialize(input_object: Any) -> Optional[int]:
return None
-class Graph(networkx.Graph):
+class Graph:
"""
- Represents a graph to be partitioned, extending the :class:`networkx.Graph`.
+    frm: TODO: Documentation: Clean up this documentation
+
+ frm: this class encapsulates / hides the underlying graph which can either be a
+ NetworkX graph or a RustworkX graph. The intent is that it provides the same
+ external interface as a NetworkX graph (for all of the uses that GerryChain cares
+ about, at least) so that legacy code that operated on NetworkX based Graph objects
+ can continue to work unchanged.
+
+ When a graph is added to a partition, however, the NX graph will be converted into
+    an RX graph and the NX graph will become inaccessible to the user. The RX graph
+ may also be "frozen" the way the NX graph was "frozen" in the legacy code, but we
+ have not yet gotten that far in the implementation.
+
+ It is not clear whether the code that does the heavy lifting on partitions will
+ need to use the old NX syntax or whether it will be useful to allow unfettered
+ access to the RX graph so that RX code can be used in these modules. TBD...
+
- This class includes additional class methods for constructing graphs from shapefiles,
- and for saving and loading graphs in JSON format.
"""
- def __repr__(self):
- return "".format(len(self.nodes), len(self.edges))
+ # Note: This class cannot have a constructor - because there is code that assumes
+ # that it can use the default constructor to create instances of it.
+ # That code is buried deep in non GerryChain code, so I don't really understand
+ # what it is doing, but the assignment of nx_graph and rx_graph class attributes/members
+ # needs to happen in the "from_xxx()" routines.
+
+ # frm: TODO: Documentation: Add documentation for new data members I am adding:
+ # _nx_graph, _rx_graph, _node_id_to_parent_node_id_map, _is_a_subgraph
+ # _node_id_to_original_nx_node_id_map
+ # => used to recreate NX graph from an RX graph and also
+ # as an aid for testing
@classmethod
- def from_networkx(cls, graph: networkx.Graph) -> "Graph":
+ def from_networkx(cls, nx_graph: networkx.Graph) -> "Graph":
"""
- Create a Graph instance from a networkx.Graph object.
+ Create a :class:`Graph` from a NetworkX.Graph object
- :param graph: The networkx graph to be converted.
- :type graph: networkx.Graph
+ This supports the use case of users creating a graph using NetworkX
+ which is convenient - both for users of the previous implementation of
+ a GerryChain object which was a subclass of NetworkX.Graph and for
+ users more generally who are familiar with NetworkX.
- :returns: The converted graph as an instance of this class.
- :rtype: Graph
+ Note that most users will not ever call this function directly,
+ because they can create a GerryChain Partition object directly
+ from a NetworkX graph, and the Partition initialization code
+ will use this function to convert the NetworkX graph to a
+ GerryChain Graph object.
+
+ :param nx_graph: A NetworkX.Graph object with node and edge data
+ to be converted into a GerryChain Graph object.
+ :type nx_graph: networkx.Graph
+
+    :returns: A GerryChain Graph object wrapping the given NetworkX graph
+    :rtype: Graph
"""
- g = cls(graph)
- return g
+ graph = cls()
+ graph._nx_graph = nx_graph
+ graph._rx_graph = None
+ graph._is_a_subgraph = False # See comments on RX subgraph issues.
+ # Maps node_ids in the graph to the "parent" node_ids in the parent graph.
+ # For top-level graphs, this is just an identity map
+ graph._node_id_to_parent_node_id_map = {node_id: node_id for node_id in graph.node_indices}
+ # Maps node_ids in the graph to the "original" node_ids in parent graph.
+ # For top-level graphs, this is just an identity map
+ graph._node_id_to_original_nx_node_id_map = {
+ node_id: node_id for node_id in graph.node_indices
+ }
+ graph.nx_to_rx_node_id_map = (
+ None # only set when an NX based graph is converted to be an RX based graph
+ )
+ return graph
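+
+    # Illustrative usage sketch for from_networkx() (example values only, a
+    # hedged sketch rather than part of the API):
+    #
+    #     nx_graph = networkx.path_graph(3)   # nodes 0, 1, 2
+    #     graph = Graph.from_networkx(nx_graph)
+    #     assert graph.is_nx_graph() and not graph.is_rx_graph()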
@classmethod
- def from_json(cls, json_file: str) -> "Graph":
+ def from_rustworkx(cls, rx_graph: rustworkx.PyGraph) -> "Graph":
+ """
+ Create a :class:`Graph` from a RustworkX.PyGraph object
+
+ There are three primary use cases for this routine:
+ 1) converting an NX-based Graph to be an RX-based
+ Graph, 2) creating a subgraph of an RX-based Graph, and
+ 3) creating a Graph whose node_ids do not need to be
+ mapped to some previous graph's node_ids.
+
+ In a little more detail:
+
+ 1) A typical way to use GerryChain is to create a graph
+ using NetworkX functionality and to then rely on the
+ initialization code in the Partition class to create
+ an RX-based Graph object. That initialization code
+ constructs a RustworkX PyGraph and then uses this
+ routine to create an RX-based Graph object, and it then
+ creates maps from the node_ids of the resulting RX-based
+ Graph back to the original NetworkX.Graph's node_ids.
+
+ 2) When creating a subgraph of a RustworkX PyGraph
+ object, the node_ids of the subgraph are (in general)
+ different from those of the parent graph. So we
+ create a mapping from the subgraph's node_ids to the
+ node_ids of the parent. The subgraph() routine
+ creates a RustworkX PyGraph subgraph, then uses this
+ routine to create an RX-based Graph using that subgraph,
+ and it then creates the mapping of subgraph node_ids
+ to the parent (RX) graph's node_ids.
+
+ 3) In those cases where no node_id mapping is needed
+ this routine provides a simple way to create an
+ RX-based GerryChain graph object.
+
+ :param rx_graph: a RustworkX PyGraph object
+ :type rx_graph: rustworkx.PyGraph
+
+ :returns: a GerryChain Graph object with an embedded RustworkX.PyGraph object
+ :rtype: "Graph"
"""
- Load a graph from a JSON file in the NetworkX json_graph format.
- :param json_file: Path to JSON file.
- :type json_file: str
+ # Ensure that the RX graph has node and edge data dictionaries
+ #
+ # While NX graphs always have node and edge data dictionaries,
+ # the node data for the nodes in RX graphs do not have to be
+ # a data dictionary - they can be any Python object. Since
+ # gerrychain code depends on having a data dictionary
+ # associated with nodes and edges, we need to check the RX
+ # graph to see if it already has node and edge data and if so,
+ # whether that node and edge data is a data dictionary.
+ #
+ # Note that there is no way to change the type of the data
+ # associated with an RX node. So if the data for a node
+ # is not already a dict then we have an unrecoverable error.
+ #
+ # However, RX does allow you to update the data for edges,
+ # so if we find an edge with no data (None), then we can
+ # create an empty dict for the edge data, and if the edge
+ # data is some other type, then we can also replace the
+ # existing edge data with a dict (retaining the original
+ # data as a value in the new dict)
+
+ for node_id in rx_graph.node_indices():
+ data_dict = rx_graph[node_id]
+ if not isinstance(data_dict, dict):
+ # Unrecoverable error - see above...
+ raise Exception(
+ "from_rustworkx(): RustworkX graph does not have node_data dictionary"
+ )
- :returns: The loaded graph as an instance of this class.
- :rtype: Graph
+ for edge_id in rx_graph.edge_indices():
+ data_dict = rx_graph.get_edge_data_by_index(edge_id)
+            if data_dict is None:
+                # Create an empty dict for edge_data
+                rx_graph.update_edge_by_index(edge_id, {})
+            # elif, so that an edge whose data was just set to {} is not wrapped again
+            elif not isinstance(data_dict, dict):
+                # Create a new dict with the existing edge_data as an item
+                rx_graph.update_edge_by_index(edge_id, {"__original_rx_edge_data": data_dict})
+
+ graph = cls()
+ graph._rx_graph = rx_graph
+ graph._nx_graph = None
+ graph._is_a_subgraph = False # See comments on RX subgraph issues.
+
+ # frm: TODO: Documentation: from_rustworkx(): Make these comments more coherent
+ #
+ # Instead of these very specific comments, just say that at this
+ # point, we don't know whether the graph is derived from NX, is a
+ # subgraph, or is something that can stand alone, so the maps are
+ # all identity maps. It is responsibility of callers to reset the
+ # maps if that is appropriate...
+
+ # Maps node_ids in the graph to the "parent" node_ids in the parent graph.
+ # For top-level graphs, this is just an identity map
+ graph._node_id_to_parent_node_id_map = {node_id: node_id for node_id in graph.node_indices}
+
+ # This routine assumes that the rx_graph was not derived from an "original" NX
+ # graph, so the RX node_ids are considered to be the "original" node_ids and
+ # we create an identity map - each node_id maps to itself as the "original" node_id
+ #
+ # If this routine is used for an RX-based Graph that was indeed derived from an
+ # NX graph, then it is the responsibility of the caller to set
+ # the _node_id_to_original_nx_node_id_map appropriately.
+ graph._node_id_to_original_nx_node_id_map = {
+ node_id: node_id for node_id in graph.node_indices
+ }
+
+ # only set when an NX based graph is converted to be an RX based graph
+ graph.nx_to_rx_node_id_map = None
+
+ return graph
+
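+    # Illustrative usage sketch for from_rustworkx() (example values only),
+    # wrapping a hand-built RX graph whose node payloads are already dicts:
+    #
+    #     rx_graph = rustworkx.PyGraph(multigraph=False)
+    #     a = rx_graph.add_node({"population": 100})
+    #     b = rx_graph.add_node({"population": 200})
+    #     rx_graph.add_edge(a, b, {})
+    #     graph = Graph.from_rustworkx(rx_graph)
+    #     assert graph.is_rx_graph()
+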
+ def to_networkx_graph(self) -> networkx.Graph:
+ """
+ Create a NetworkX.Graph object that has the same nodes, edges,
+ node_data, and edge_data as the GerryChain Graph object.
+
+ The intended purpose of this routine is to allow a user to
+ run a MarkovChain - which uses an embedded RustworkX graph
+ and then extract an equivalent version of that graph with all
+ of its data as a NetworkX.Graph object - in order to use
+ NetworkX routines to access and manipulate the graph.
+
+ In short, this routine allows users to use NetworkX
+ functionality on a graph after running a MarkovChain.
+
+ If the GerryChain graph object is NX-based, then this
+ routine merely returns the embedded NetworkX.Graph object.
+
+ :returns: A NetworkX.Graph object that is equivalent to the
+ GerryChain Graph object (nodes, edges, node_data, edge_data)
+ :rtype: networkx.Graph
+ """
+ if self.is_nx_graph():
+ return self.get_nx_graph()
+
+ if not self.is_rx_graph():
+ raise TypeError("Graph passed to 'to_networkx_graph()' must be a rustworkx graph")
+
+ # We have an RX-based Graph, and we want to create a NetworkX Graph object
+ # that has all of the node data and edge data, and which has the
+ # node_ids and edge_ids of the original NX graph.
+ #
+ # Original node_ids are those that were used in the original NX
+ # Graph used to create the RX-based Graph object.
+ #
+
+ # Confirm that this RX based graph was derived from an NX graph...
+ if self._node_id_to_original_nx_node_id_map is None:
+ raise Exception("to_networkx_graph(): _node_id_to_original_nx_node_id_map is None")
+
+ rx_graph = self.get_rx_graph()
+
+ # Extract node data
+ node_data = []
+ for node_id in rx_graph.node_indices():
+ node_payload = rx_graph[node_id]
+ # Get the "original" node_id
+ original_nx_node_id = self.original_nx_node_id_for_internal_node_id(node_id)
+ node_data.append({"node_name": original_nx_node_id, **node_payload})
+
+ # Extract edge data
+ edge_data = []
+ for edge_id in rx_graph.edge_indices():
+ edge = rx_graph.get_edge_endpoints_by_index(edge_id)
+ edge_0_node_id = edge[0]
+ edge_1_node_id = edge[1]
+ # Get the "original" node_ids
+ edge_0_original_nx_node_id = self.original_nx_node_id_for_internal_node_id(
+ edge_0_node_id
+ )
+ edge_1_original_nx_node_id = self.original_nx_node_id_for_internal_node_id(
+ edge_1_node_id
+ )
+ edge_payload = rx_graph.get_edge_data_by_index(edge_id)
+ # Add edges and edge data using the original node_ids
+ # as the names/IDs for the nodes that make up the edge
+ edge_data.append(
+ {
+ "source": edge_0_original_nx_node_id,
+ "target": edge_1_original_nx_node_id,
+ **edge_payload,
+ }
+ )
+
+ # Create Pandas DataFrames
+
+ nodes_df = pd.DataFrame(node_data)
+ edges_df = pd.DataFrame(edge_data)
+
+ # Create a NetworkX Graph object from the edges_df, using
+ # "source", and "tartet" to define edge node_ids, and adding
+ # all attribute data (True).
+ nx_graph = networkx.from_pandas_edgelist(edges_df, "source", "target", True, networkx.Graph)
+
+ # Add all of the node_data, using the "node_name" attr as the NX Graph node_id
+ nodes_df = nodes_df.set_index("node_name")
+ networkx.set_node_attributes(nx_graph, nodes_df.to_dict(orient="index"))
+
+ return nx_graph
+
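+    # Illustrative round-trip sketch (example values only): build with NX,
+    # convert to RX for the chain, then recover an equivalent NX graph:
+    #
+    #     graph = Graph.from_networkx(networkx.path_graph(3))
+    #     rx_based = graph.convert_from_nx_to_rx()
+    #     recovered = rx_based.to_networkx_graph()
+    #     assert set(recovered.nodes) == {0, 1, 2}
+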
+ # frm: TODO: Refactoring: Create a defined type name "node_id" to use instead of "Any"
+ #
+ # This is purely cosmetic, but it would provide an opportunity to add a comment that
+ # talked about NX node_ids vs. RX node_ids and hence why the type for a node_id is
+ # a vague "Any"...
+
+ def original_nx_node_id_for_internal_node_id(self, internal_node_id: Any) -> Any:
+ """
+ Translate a node_id to its "original" node_id.
+
+ :param internal_node_id: A node_id to be translated
+ :type internal_node_id: Any
+
+ :returns: A translated node_id
+ :rtype: Any
+ """
+ return self._node_id_to_original_nx_node_id_map[internal_node_id]
+
+ # frm: TODO: Testing: Create a test for this routine
+    def original_nx_node_ids_for_set(self, set_of_node_ids: set[Any]) -> set[Any]:
+ """
+ Translate a set of node_ids to their "original" node_ids.
+
+ :param set_of_node_ids: A set of node_ids to be translated
+ :type set_of_node_ids: set[Any]
+
+ :returns: A set of translated node_ids
+ :rtype: set[Any]
+ """
+ _node_id_to_original_nx_node_id_map = self._node_id_to_original_nx_node_id_map
+ new_set = {_node_id_to_original_nx_node_id_map[node_id] for node_id in set_of_node_ids}
+ return new_set
+
+ # frm: TODO: Testing: Create a test for this routine
+ def original_nx_node_ids_for_list(self, list_of_node_ids: list[Any]) -> list[Any]:
+ """
+ Translate a list of node_ids to their "original" node_ids.
+
+ :param list_of_node_ids: A list of node_ids to be translated
+ :type list_of_node_ids: list[Any]
+
+ :returns: A list of translated node_ids
+ :rtype: list[Any]
+ """
+ # Utility routine to quickly translate a set of node_ids to their original node_ids
+ _node_id_to_original_nx_node_id_map = self._node_id_to_original_nx_node_id_map
+ new_list = [_node_id_to_original_nx_node_id_map[node_id] for node_id in list_of_node_ids]
+ return new_list
+
+ def internal_node_id_for_original_nx_node_id(self, original_nx_node_id: Any) -> Any:
+ """
+ Discover the "internal" node_id in the current GerryChain graph
+ that corresponds to the "original" node_id in the top-level
+ graph (presumably an NX-based graph object).
+
+ This was originally created to facilitate testing where it was
+ convenient to express the test success criteria in terms of
+ "original" node_ids, but the actual test needed to be made
+ using the "internal" (RX) node_ids.
+
+ :param original_nx_node_id: The "original" node_id
+ :type original_nx_node_id: Any
+
+ :returns: The corresponding "internal" node_id
+ :rtype: Any
+ """
+ # Note: TODO: Performance: This code is inefficient but it is not a priority to fix now...
+ #
+ # The code reverses the dict that maps internal node_ids to "original"
+ # node_ids, which has an entry for every node in the graph - hence large
+ # for large graphs, which is costly, but worse - it does this every time
+ # it is called, so if the calling code is looping through a list of nodes
+ # then this reverse dict computation will happen each time.
+ #
+ # The obvious fix is to just create the reverse map once when the "internal"
+ # graph is created. This would be simple to do and safe, because the
+ # "internal" graph is frozen.
+ #
+ # However, at present (December 2025) this routine is only ever used for
+ # tests, so I am putting it on the back burner...
+
+ # reverse the map so we can go from original node_id to internal node_id
+        original_node_id_to_internal_node_id_map = {
+            v: k for k, v in self._node_id_to_original_nx_node_id_map.items()
+        }
+        return original_node_id_to_internal_node_id_map[original_nx_node_id]
+
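+    # A minimal sketch of the caching fix described above (the attribute name
+    # _original_to_internal_node_id_map is hypothetical, not implemented):
+    #
+    #     if self._original_to_internal_node_id_map is None:
+    #         self._original_to_internal_node_id_map = {
+    #             v: k for k, v in self._node_id_to_original_nx_node_id_map.items()
+    #         }
+    #     return self._original_to_internal_node_id_map[original_nx_node_id]
+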
+ def verify_graph_is_valid(self) -> bool:
+ """
+ Verify that the graph is valid.
+
+ This may be overkill, but the idea is that at least in
+ development mode, it would be prudent to check periodically
+ to see that the graph data structure has not been corrupted.
+
+ :returns: True if the graph is deemed valid
+ :rtype: bool
+ """
+
+ # frm: TODO: Performance: Only check verify_graph_is_valid() in development.
+ #
+ # For now, in order to assess performance differences between NX and RX
+ # I will just return True...
+ return True
+
+ # Sanity check - this is where to add additional sanity checks in the future.
+
+ # frm: TODO: Code: Enhance verify_graph_is_valid to do more...
+
+ # frm: TODO: Performance: verify_graph_is_valid() is expensive - called a lot
+ #
+ # Come up with a way to run this in "debug mode" - that is, while in development/testing
+ # but not in production. It actually accounted for 5% of runtime...
+
+ # Checks that there is one and only one graph
+ if not (
+ (self._nx_graph is not None and self._rx_graph is None)
+ or (self._nx_graph is None and self._rx_graph is not None)
+ ):
+ raise Exception("Graph.verify_graph_is_valid(): graph not properly configured")
+
+ # frm: TODO: Performance: is_nx_graph() and is_rx_graph() are expensive.
+ #
+ # Not all of the calls on these routines are needed in production - some are just
+ # sanity checking. Find a way to NOT run this code when in production.
+
+ # frm: TODO: Refactoring: Reorder these following routines in sensible order
+
+ def is_nx_graph(self) -> bool:
+ """
+ Determine if the graph is NX-based
+
+ :rtype: bool
"""
- with open(json_file) as f:
+        # frm: TODO: Performance: Only run verify_graph_is_valid() in development
+ #
+ # Find a clever way to only run this code in development. Commenting it out for now...
+ # self.verify_graph_is_valid()
+ return self._nx_graph is not None
+
+ def get_nx_graph(self) -> networkx.Graph:
+ """
+ Return the embedded NX graph object
+
+ :rtype: networkx.Graph
+ """
+ if not self.is_nx_graph():
+ raise TypeError("Graph passed to 'get_nx_graph()' must be a networkx graph")
+ return self._nx_graph
+
+ def get_rx_graph(self) -> rustworkx.PyGraph:
+ """
+ Return the embedded RX graph object
+
+ :rtype: rustworkx.PyGraph
+ """
+ if not self.is_rx_graph():
+ raise TypeError("Graph passed to 'get_rx_graph()' must be a rustworkx graph")
+ return self._rx_graph
+
+ def is_rx_graph(self) -> bool:
+ """
+ Determine if the graph is RX-based
+
+ :rtype: bool
+ """
+        # frm: TODO: Performance: Only run verify_graph_is_valid() in development
+ #
+ # Find a clever way to only run this code in development. Commenting it out for now...
+ # self.verify_graph_is_valid()
+ return self._rx_graph is not None
+
+ def convert_from_nx_to_rx(self) -> "Graph":
+ """
+ Convert an NX-based graph object to be an RX-based graph object.
+
+ The primary use case for this routine is support for users
+ constructing a graph using NetworkX functionality and then
+ converting that NetworkX graph to RustworkX when creating a
+ Partition object.
+
+ :returns: An RX-based graph that is "the same" as the given NX-based graph
+ :rtype: "Graph"
+ """
+
+ # Note that in both cases in the if-stmt below, the nodes are not copied.
+ # This is arguably dangerous, but in our case I think it is OK. Stated
+ # differently, the actual node data (the dictionaries) in the original
+ # graph (self) will be reused in the returned graph - either because we
+ # are just returning the same graph (if it is already based on rx.PyGraph)
+ # or if we are converting it from NX.
+ #
+ self.verify_graph_is_valid()
+ if self.is_nx_graph():
+
+ if self._is_a_subgraph:
+ # This routine is intended to be used in exactly one place - in converting
+ # an NX based Graph object to be RX based when creating a Partition object.
+ # In the future, it might become useful for other reasons, but until then
+ # to guard against careless uses, the code will insist that it not be a subgraph.
+
+ # frm: TODO: Documentation: Add a comment about the intended use of this routine
+ # to its overview comment above.
+ raise Exception("convert_from_nx_to_rx(): graph to be converted is a subgraph")
+
+ nx_graph = self._nx_graph
+ rx_graph = rustworkx.networkx_converter(nx_graph, keep_attributes=True)
+
+ # Note that the resulting RX graph will have multigraph set to False which
+ # ensures that there is never more than one edge between two specific nodes.
+ # This is perhaps not all that interesting in general, but it is critical
+ # when getting the edge_id from an edge using RX.edge_indices_from_endpoints()
+ # routine - because it ensures that only a single edge_id is returned...
+
+ converted_graph = Graph.from_rustworkx(rx_graph)
+
+ # Some graphs have geometry data (from a geodataframe), so preserve it if it exists
+ if hasattr(self, "geometry"):
+ converted_graph.geometry = self.geometry
+
+ # Create a mapping from the old NX node_ids to the new RX node_ids (created by
+ # RX when it converts from NX)
+ nx_to_rx_node_id_map = {
+ converted_graph.node_data(node_id)["__networkx_node__"]: node_id
+ for node_id in converted_graph._rx_graph.node_indices()
+ }
+ converted_graph.nx_to_rx_node_id_map = nx_to_rx_node_id_map
+
+ # We also have to update the _node_id_to_original_nx_node_id_map to refer to the
+ # node_ids in the NX Graph object.
+ _node_id_to_original_nx_node_id_map = {}
+ for node_id in converted_graph.node_indices:
+ original_nx_node_id = converted_graph.node_data(node_id)["__networkx_node__"]
+ _node_id_to_original_nx_node_id_map[node_id] = original_nx_node_id
+ converted_graph._node_id_to_original_nx_node_id_map = (
+ _node_id_to_original_nx_node_id_map
+ )
+
+ return converted_graph
+ elif self.is_rx_graph():
+ return self
+ else:
+ raise TypeError(
+ "Graph passed to 'convert_from_nx_to_rx()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def get_nx_to_rx_node_id_map(self) -> dict[Any, Any]:
+ """
+ Return the dict that maps NX node_ids to RX node_ids
+
+ The primary use case for this routine is to support automatically
+ converting NX-based graph objects to be RX-based when creating a
+ Partition object. The issue is that when you convert from NX to RX
+ the node_ids change and so you need to update the Partition object's
+ Assignment to use the new RX node_ids. This routine is used
+ to translate those NX node_ids to the new RX node_ids when
+ initializing a Partition object.
+
+ :rtype: dict[Any, Any]
+ """
+ # Simple getter method
+ if not self.is_rx_graph():
+ raise TypeError("Graph passed to 'get_nx_to_rx_node_id()' is not a rustworkx graph")
+
+ return self.nx_to_rx_node_id_map
+
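+    # Illustrative sketch (nx_assignment/rx_assignment are example names):
+    # translating a legacy NX-keyed assignment to RX node_ids after conversion:
+    #
+    #     rx_based = graph.convert_from_nx_to_rx()
+    #     nx_to_rx = rx_based.get_nx_to_rx_node_id_map()
+    #     rx_assignment = {nx_to_rx[n]: part for n, part in nx_assignment.items()}
+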
+ @classmethod
+ def from_json(cls, json_file_name: str) -> "Graph":
+ """
+ Create a :class:`Graph` from a JSON file
+
+ :param json_file_name: JSON file
+ # frm: TODO: Documentation: more detail on contents of JSON file needed here
+ :type json_file_name: str
+
+ :returns: A GerryChain Graph object with data from JSON file
+ :rtype: "Graph"
+ """
+
+ # Note that this returns an NX-based Graph object. At some point in
+ # the future, if we embrace an all RX world, it will make sense to
+ # have it produce an RX-based Graph object.
+
+ with open(json_file_name) as f:
data = json.load(f)
- g = json_graph.adjacency_graph(data)
- graph = cls.from_networkx(g)
+
+ # A bit of Python magic - an adjacency graph is a dict of dict of dicts
+ # which is structurally equivalent to a NetworkX graph, so you can just
+ # pretend that is what it is and it all works.
+ nx_graph = json_graph.adjacency_graph(data)
+
+ graph = cls.from_networkx(nx_graph)
graph.issue_warnings()
return graph
- def to_json(
- self, json_file: str, *, include_geometries_as_geojson: bool = False
- ) -> None:
+ def to_json(self, json_file_name: str, include_geometries_as_geojson: bool = False) -> None:
"""
- Save a graph to a JSON file in the NetworkX json_graph format.
+ Dump a GerryChain Graph object to disk as a JSON file
- :param json_file: Path to target JSON file.
- :type json_file: str
- :param bool include_geometry_as_geojson: Whether to include
- any :mod:`shapely` geometry objects encountered in the graph's node
- attributes as GeoJSON. The default (``False``) behavior is to remove
- all geometry objects because they are not serializable. Including the
- GeoJSON will result in a much larger JSON file.
- :type include_geometries_as_geojson: bool, optional
+        :param json_file_name: name of JSON file to be created
+        :type json_file_name: str
+        :param include_geometries_as_geojson: Whether to include any
+            :mod:`shapely` geometry objects in the graph's node attributes
+            as GeoJSON. The default (``False``) removes all geometry objects
+            because they are not JSON-serializable.
+        :type include_geometries_as_geojson: bool, optional
- :returns: None
+ :rtype: None
"""
- data = json_graph.adjacency_data(self)
+ # frm TODO: Code: Implement graph.to_json for an RX based graph
+ if not self.is_nx_graph():
+ raise TypeError("Graph passed to 'to_json()' is not a networkx graph")
+
+ data = json_graph.adjacency_data(self._nx_graph)
if include_geometries_as_geojson:
convert_geometries_to_geojson(data)
else:
remove_geometries(data)
- with open(json_file, "w") as f:
+ with open(json_file_name, "w") as f:
json.dump(data, f, default=json_serialize)
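+
+    # Illustrative JSON round-trip sketch (the file name is an example only):
+    #
+    #     graph = Graph.from_networkx(networkx.path_graph(3))
+    #     graph.to_json("example_graph.json")
+    #     same_graph = Graph.from_json("example_graph.json")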
@classmethod
@@ -123,7 +665,7 @@ def from_file(
cls,
filename: str,
adjacency: str = "rook",
- cols_to_add: Optional[List[str]] = None,
+ cols_to_add: Optional[list[str]] = None,
reproject: bool = False,
ignore_errors: bool = False,
) -> "Graph":
@@ -138,7 +680,7 @@ def from_file(
:type adjacency: str, optional
:param cols_to_add: The names of the columns that you want to
add to the graph as node attributes. Default is None.
- :type cols_to_add: Optional[List[str]], optional
+ :type cols_to_add: Optional[list[str]], optional
:param reproject: Whether to reproject to a UTM projection before
creating the graph. Default is False.
:type reproject: bool, optional
@@ -161,7 +703,6 @@ def from_file(
or install ``geopandas`` separately.
"""
- import geopandas as gp
df = gp.read_file(filename)
graph = cls.from_geodataframe(
@@ -171,7 +712,41 @@ def from_file(
reproject=reproject,
ignore_errors=ignore_errors,
)
- graph.graph["crs"] = df.crs.to_json()
+ # frm: TODO: Documentation: Make it clear that this creates an NX-based
+ # Graph object.
+ #
+ # Also add some documentation (here or elsewhere)
+ # about what CRS data is and what it is used for.
+ #
+ # Note that the NetworkX.Graph.graph["crs"] is only
+ # ever accessed in this file (graph.py), so I am not
+ # clear what it is used for. It seems to just be set
+ # and never used except to be written back out to JSON.
+ #
+ # The issue (I think) is that we do not preserve graph
+ # attributes when we convert to RX from NX, so if the
+ # user wants to write an RX based Graph back out to JSON
+ # this data (and another other graph level data) would be
+ # lost.
+ #
+ # So - need to figure out what CRS is used for...
+ #
+ # Peter commented on this in a PR comment:
+ #
+ # CRS stands for "Coordinate Reference System" which can be thought of
+ # as the projection system used for the polygons contained in the
+ # geodataframe. While it is not used in any of the graph operations of
+ # GerryChain, it may be used in things like validators and updaters. Since
+ # the CRS determines the projection system used by the underlying
+ # geodataframe, any area or perimeter computations encoded on the graph
+ # are stored with the understanding that those values may inherit
+        #     distortions from the projection used. We keep this around as metadata so
+ # that, in the event that the original geodataframe source is lost,
+ # the graph metadata still carries enough information for us to sanity
+ # check the area and perimeter computations if we get weird numbers.
+
+ # Store CRS data as an attribute of the NX graph
+ graph._nx_graph.graph["crs"] = df.crs.to_json()
return graph
@classmethod
@@ -179,13 +754,17 @@ def from_geodataframe(
cls,
dataframe: pd.DataFrame,
adjacency: str = "rook",
- cols_to_add: Optional[List[str]] = None,
+ cols_to_add: Optional[list[str]] = None,
reproject: bool = False,
ignore_errors: bool = False,
crs_override: Optional[Union[str, int]] = None,
) -> "Graph":
+
+ # frm: Changed to operate on a NetworkX.Graph object and then convert to a
+ # Graph object at the end of the function.
+
"""
- Creates the adjacency :class:`Graph` of geometries described by `dataframe`.
+ Create the adjacency :class:`Graph` of geometries described by `dataframe`.
The areas of the polygons are included as node attributes (with key `area`).
The shared perimeter of neighboring polygons are included as edge attributes
(with key `shared_perim`).
@@ -208,7 +787,7 @@ def from_geodataframe(
:type adjacency: str, optional
:param cols_to_add: The names of the columns that you want to
add to the graph as node attributes. Default is None.
- :type cols_to_add: Optional[List[str]], optional
+ :type cols_to_add: Optional[list[str]], optional
:param reproject: Whether to reproject to a UTM projection before
creating the graph. Default is ``False``.
:type reproject: bool, optional
@@ -252,21 +831,62 @@ def from_geodataframe(
# Generate dict of dicts of dicts with shared perimeters according
# to the requested adjacency rule
- adjacencies = neighbors(df, adjacency)
- graph = cls(adjacencies)
-
- graph.geometry = df.geometry
-
- graph.issue_warnings()
+ adjacencies = neighbors(df, adjacency) # Note - this is adjacency.neighbors()
+
+ nx_graph = networkx.Graph(adjacencies)
+
+ # frm: TODO: Documentation: Document what geometry is used for.
+ #
+ # Need to grok what geometry is used for - it is used in partition.py.plot()
+ # and maybe that is the only place it is used, but it is also used below
+ # to set other data, such as add_boundary_perimeters() and areas. The
+ # reason this is an issue is because I need to know what to carry over to
+ # the RX version of a Graph when I convert to RX when making a Partition.
+ # Partition.plot() uses this information, so it needs to be available in
+ # the RX version of a Graph - which essentially means that I need to grok
+ # how plot() works and where it gets its information and how existing
+ # users use it...
+ #
+ # There is a test failure due to geometry not being available after conversion to RX.
+ #
+ # Here is what Peter said in the PR:
+ #
+ # The geometry attribute on df is a special attribute that only appears on
+ # geodataframes. This is just a list of polygons representing some real-life
+ # geometries underneath a certain projection system (CRS). These polygons can
+ # then be fed to matplotilb to make nice plots of things, or they can be used
+ # to compute things like area and perimeter for use in updaters and validators
+ # that employ some sort of Reock score (uncommon, but unfortunately necessary in
+ # some jurisdictions). We probably don't need to store this as an attribute on
+ # the Graph._nxgraph object (or the Graph._rxgraph) object, however. In fact, it
+ # might be best to just make a Graph.dataframe attribute to store all of the
+ # graph data on, and add attributes to _nxgraph and _rxgraph nodes as needed
+ #
+
+ nx_graph.geometry = df.geometry
+
+ # frm: TODO: Refactoring: Rethink the name of add_boundary_perimeters
+ #
+ # It acts on an nx_graph which seems wrong with the given name.
+ # Maybe it should be: add_boundary_perimeters_to_nx_graph()
+ #
+ # Need to check in with Peter to see if this is considered
+ # part of the external API.
+
+ # frm: TODO: Refactoring: Create an nx_utilities module
+ #
+ # It raises the question of whether there should be an nx_utilities
+ # module for stuff designed to only work on nx_graph objects.
+ #
+ # Note that Peter said: "I like this idea"
+ #
# Add "exterior" perimeters to the boundary nodes
- add_boundary_perimeters(graph, df.geometry)
+ add_boundary_perimeters(nx_graph, df.geometry)
# Add area data to the nodes
areas = df.geometry.area.to_dict()
- networkx.set_node_attributes(graph, name="area", values=areas)
-
- graph.add_data(df, columns=cols_to_add)
+ networkx.set_node_attributes(nx_graph, name="area", values=areas)
if crs_override is not None:
df.set_crs(crs_override, inplace=True)
@@ -278,66 +898,352 @@ def from_geodataframe(
"Otherwise, please set the CRS using the `crs_override` parameter. "
"Attempting to proceed without a CRS."
)
- graph.graph["crs"] = None
+ nx_graph.graph["crs"] = None
else:
- graph.graph["crs"] = df.crs.to_json()
+ nx_graph.graph["crs"] = df.crs.to_json()
+
+ graph = cls.from_networkx(nx_graph)
+
+ # frm: Moved from earlier in the function so that we would have a Graph
+ # object (vs. NetworkX.Graph object)
+
+ graph.add_data(df, columns=cols_to_add)
+ graph.issue_warnings()
return graph
- def lookup(self, node: Any, field: Any) -> Any:
+ # Performance Note:
+ #
+ # Most of the functions in the Graph class will be called after a
+ # partition has been created and the underlying graph converted
+ # to be based on RX. So, by testing first for RX we actually
+ # save a significant amount of time because we do not need to
+ # also test for NX (if you test for NX first then you do two tests).
+ #
+
+ @property
+ def node_indices(self) -> set[Any]:
+ """
+ Return a set of the node_ids in the graph
+
+ :rtype: set[Any]
+ """
+ self.verify_graph_is_valid()
+
+ # frm: TODO: Refactoring: node_indices() does the same thing that graph.nodes does
+ # - returning a list of node_ids.
+ # Do we really want to support two ways of doing the same thing?
+ # Actually this returns a set rather than a list - not sure that matters though...
+ #
+ # My code uses node_indices() to make it clear we are talking about node_ids...
+ #
+ # The question is whether to deprecate nodes()...
+
+ if self.is_rx_graph():
+ return set(self._rx_graph.node_indices())
+ elif self.is_nx_graph():
+ return set(self._nx_graph.nodes)
+ else:
+ raise TypeError(
+ "Graph passed to 'node_indices()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ @property
+ def edge_indices(self) -> set[Any]:
"""
- Lookup a node/field attribute.
+ Return a set of the edge_ids in the graph
- :param node: Node to look up.
- :type node: Any
- :param field: Field to look up.
- :type field: Any
+ :rtype: set[Any]
+ """
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # A set of edge_ids for the edges
+ return set(self._rx_graph.edge_indices())
+ elif self.is_nx_graph():
+ # A set of edge_ids (tuples) extracted from the graph's EdgeView
+ return set(self._nx_graph.edges)
+ else:
+ raise TypeError(
+ "Graph passed to 'edge_indices()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def get_edge_from_edge_id(self, edge_id: Any) -> tuple[Any, Any]:
+ """
+ Return the edge (tuple of node_ids) corresponding to the
+ given edge_id
+
+ Note that in NX, an edge_id is the same as an edge - it is
+ just a tuple of node_ids. However, in RX, an edge_id is
+ an integer, so if you want to get the tuple of node_ids
+ you need to use the edge_id to get that tuple...
+
+ :param edge_id: The ID of the desired edge
+ :type edge_id: Any
+
+ :returns: An edge, namely a tuple of node_ids
+ :rtype: tuple[Any, Any]
+ """
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # In RX, we need to go get the edge tuple
+ # frm: TODO: Performance - use get_edge_endpoints_by_index() to get edge
+ #
+ # The original RX code (before October 27, 2025):
+ # return self._rx_graph.edge_list()[edge_id]
+ endpoints = self._rx_graph.get_edge_endpoints_by_index(edge_id)
+ return (endpoints[0], endpoints[1])
+ elif self.is_nx_graph():
+ # In NX, the edge_id is also the edge tuple
+ return edge_id
+ else:
+ raise TypeError(
+ "Graph passed to 'get_edge_from_edge_id()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
- :returns: The value of the attribute `field` at `node`.
+ # frm: TODO: Refactoring: Create abstract "edge" and "edge_id" type names
+ #
+ # As with node_id, this is cosmetic but it will provide a nice place to
+ # put a comment about the difference between NX and RX and it will make
+ # the type annotations make more sense...
+
+ def get_edge_id_from_edge(self, edge: tuple[Any, Any]) -> Any:
+ """
+ Get the edge_id that corresponds to the given edge.
+
+ In RX an edge_id is an integer that designates an edge (an edge is
+ a tuple of node_ids). In NX, an edge_id IS the tuple of node_ids.
+ So, in general, to support both NX and RX, if you want to get access
+ to the edge data for an edge (tuple of node_ids), you need to
+ ask for the edge_id.
+
+        This functionality is needed, for instance, when code (as in
+        tree.py) walks chains of nodes, reconstructs edges from successive
+        node pairs, and then needs the edge_id to look up the edge data.
+
+ :param edge: A tuple of node_ids.
+ :type edge: tuple[Any, Any]
+
+ :returns: The ID associated with the given edge
:rtype: Any
"""
- return self.nodes[node][field]
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+
+ # frm: TODO: Performance: Perhaps get_edge_id_from_edge() is too expensive...
+ #
+            # If this routine becomes a significant performance issue, then perhaps
+            # we can change the algorithms that use it so that it is not needed.
+            # In particular, there are several routines in tree.py that use it
+            # by traversing chains of nodes (successors and predecessors) which
+            # requires the code to recreate the edges from the nodes in hand. This
+            # was not a problem in an NX world - the tuple of nodes was exactly what
+            # an edge_id was, but in the RX world it is not - necessitating this routine.
+ #
+ # BUT... If the code had chains of edges rather than chains of nodes,
+ # then you could have the edge_ids at hand already and avoid having to
+ # do this lookup.
+ #
+ # However, it may be that the RX edge_indices_from_endpoints() is smart
+ # enough (for instance if it caches a dict mapping) that the performance
+ # hit is minimal... Here's to hoping that RX is "smart enough"... ;-)
+
+ # Note that while in general the routine, edge_indices_from_endpoints(),
+ # can return more than one edge in the case of a Multi-Graph (a graph that
+ # allows more than one edge between two nodes), we can rely on it only
+ # returning a single edge because the RX graph object has multigraph set
+ # to false by RX.networkx_converter() - because the NX graph was undirected...
+ #
+ edge_indices = self._rx_graph.edge_indices_from_endpoints(edge[0], edge[1])
+ return edge_indices[0] # there will always be one and only one
+ elif self.is_nx_graph():
+ # In NX, the edge_id is also the edge tuple
+ return edge
+ else:
+ raise TypeError(
+ "Graph passed to 'get_edge_id_from_edge()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
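+
+    # Illustrative contrast (nx_based/rx_based are example names): in an
+    # NX-based graph the edge_id IS the edge tuple, while in an RX-based
+    # graph it is an integer index:
+    #
+    #     assert nx_based.get_edge_id_from_edge((0, 1)) == (0, 1)
+    #     edge_id = rx_based.get_edge_id_from_edge((0, 1))
+    #     assert rx_based.get_edge_from_edge_id(edge_id) == (0, 1)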
@property
- def node_indices(self):
- return set(self.nodes)
+ def nodes(self) -> list[Any]:
+ """
+ Return a list of all of the node_ids in the graph.
+
+ This routine still exists because there is a lot of legacy
+ code that uses this syntax to iterate through all of the nodes
+ in a graph.
+
+ There is another routine, node_indices(), which does essentially
+ the same thing (it returns a set of node_ids, however, rather than
+ a list).
+
+ Why have two routines that do the same thing? The answer is that with
+        the move to RX, it seemed appropriate to emphasize the distinction
+ between objects and the IDs for objects, hence the introduction of
+ node_indices() and edge_indices() routines. This distinction is
+ critical for edges, but mostly not important for nodes. In fact
+ this routine is implemented by just converting node_indices to a list.
+ So, it is essentially a style issue - when referring to nodes, we
+ are almost always really referring to node_ids, so why not use a
+ routine called node_indices()?
+
+ Note that there is a subtle point to be made about node names vs.
+ node_ids. It was common before the transition to RX to create
+ nodes with IDs that were essentially names. That is, the ID had
+ semantic weight. This is not true with RX node_ids. So, any
+ code that relies on the semantics of a node's ID (treating it
+ like a name) is suspect in the new RX world.
+
+ :returns: A list of all of the node_ids in the graph
+ :rtype: list[Any]
+ """
+
+ # frm: TODO: Documentation: Warn users in Migration Guide that nodes() has gone away
+ #
+ # Since the legacy code implemented a GerryChain Graph as a subclass of NetworkX.Graph
+ # legacy code could take advantage of NX cleverness - NX returns a NodeView object for
+ # nx_graph.nodes which supports much more than just a list of node_ids (which is all that
+ # code below does).
+ #
+ # Probably the most common use of nx_graph.nodes was to access node data as in:
+ #
+    #    nx_graph.nodes[node_id][<attr_name>]
+ #
+ # In the new world, to do that you need to do:
+ #
+    #    graph.node_data(node_id)[<attr_name>]
+ #
+ # So, almost the same number of keystrokes, but if a legacy user uses nodes[...] the
+ # old way, it won't work out well.
+ #
+
+ # frm: TODO: Refactoring: Think about whether to do away entirely with graph.nodes
+ #
+ # All this routine does now is to coerce the set of nodes obtained by node_indices()
+ # to be a list (which I think is unnecessary). So, why have it at all? Why not just
+ # tell legacy users via an exception that it no longer exists?
+ #
+ # On the other hand, it maybe does no harm to allow legacy users to indulge in
+ # what appears to be a very common idiom in legacy code...
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # A list of integer node_ids
+ return list(self._rx_graph.node_indices())
+ elif self.is_nx_graph():
+ # A list of node_ids -
+ return list(self._nx_graph.nodes)
+ else:
+ raise TypeError(
+ "Graph passed to 'nodes()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
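+
+    # Migration sketch for the idiom discussed above (attr_name stands in
+    # for any node-attribute key):
+    #
+    #     value = graph.nodes[node_id][attr_name]      # legacy NX-subclass style
+    #     value = graph.node_data(node_id)[attr_name]  # new style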
@property
- def edge_indices(self):
- return set(self.edges)
+ def edges(self) -> set[tuple[Any, Any]]:
+ """
+ Return a set of all of the edges in the graph, where each
+ edge is a tuple of node_ids
- def add_data(
- self, df: pd.DataFrame, columns: Optional[Iterable[str]] = None
- ) -> None:
+        :rtype: set[tuple[Any, Any]]
+ """
+ # Return a set of edge tuples
+
+ # frm: TODO: Code: ???: Should edges return a list instead of a set?
+ #
+ # Peter said he thought users would expect a list - but why?
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # A set of tuples for the edges
+ return set(self._rx_graph.edge_list())
+ elif self.is_nx_graph():
+ # A set of tuples extracted from the graph's EdgeView
+ return set(self._nx_graph.edges)
+ else:
+ raise TypeError(
+ "Graph passed to 'edges()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def add_edge(self, node_id1: Any, node_id2: Any) -> None:
+ """
+ Add an edge to the graph from node_id1 to node_id2
+
+ :param node_id1: The node_id for one of the nodes in the edge
+ :type node_id1: Any
+        :param node_id2: The node_id for the other node in the edge
+ :type node_id2: Any
+
+ :rtype: None
+ """
+
+ # frm: TODO: Code: add_edge(): Check that nodes exist and that they have data dicts.
+ #
+ # This checking should probably be limited to development mode, but
+ # the issue is that an RX node need not have a data value that is
+ # a dict, but GerryChain code depends on having a data dict. So,
+ # it makes sense to test and make sure that the nodes exist and
+ # have a data dict...
+
+ # frm: TODO: Code: add_edge(): Do we need to check to make sure the edge does not already
+ # exist?
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # empty dict tells RX the edge data will be a dict
+ self._rx_graph.add_edge(node_id1, node_id2, {})
+ elif self.is_nx_graph():
+ self._nx_graph.add_edge(node_id1, node_id2)
+ else:
+ raise TypeError(
+ "Graph passed to 'add_edge()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def add_data(self, df: pd.DataFrame, columns: Optional[Iterable[str]] = None) -> None:
"""
Add columns of a DataFrame to a graph as node attributes
by matching the DataFrame's index to node ids.
:param df: Dataframe containing given columns.
:type df: :class:`pandas.DataFrame`
- :param columns: List of dataframe column names to add. Default is None.
+ :param columns: list of dataframe column names to add. Default is None.
:type columns: Optional[Iterable[str]], optional
:returns: None
"""
+        if not self.is_nx_graph():
+ raise TypeError("Graph passed to 'add_data()' is not a networkx graph")
+
if columns is None:
columns = list(df.columns)
check_dataframe(df[columns])
+ # Create dict: {node_id: {attr_name: attr_value}}
column_dictionaries = df.to_dict("index")
- networkx.set_node_attributes(self, column_dictionaries)
+ nx_graph = self._nx_graph
+ networkx.set_node_attributes(nx_graph, column_dictionaries)
- if hasattr(self, "data"):
- self.data[columns] = df[columns] # type: ignore
+ if hasattr(nx_graph, "data"):
+ nx_graph.data[columns] = df[columns] # type: ignore
else:
- self.data = df[columns]
+ nx_graph.data = df[columns]
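+
+    # Illustrative usage sketch (column name and values are examples only);
+    # the DataFrame index must line up with the graph's node_ids:
+    #
+    #     df = pd.DataFrame({"population": [100, 200, 300]}, index=[0, 1, 2])
+    #     graph.add_data(df, columns=["population"])
+    #     assert graph.node_data(0)["population"] == 100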
def join(
self,
dataframe: pd.DataFrame,
- columns: Optional[List[str]] = None,
+ columns: Optional[list[str]] = None,
left_index: Optional[str] = None,
right_index: Optional[str] = None,
) -> None:
@@ -349,7 +1255,7 @@ def join(
:type dataframe: :class:`pandas.DataFrame`
:columns: The columns whose data you wish to add to the graph.
If not provided, all columns are added. Default is None.
- :type columns: Optional[List[str]], optional
+ :type columns: Optional[list[str]], optional
:left_index: The node attribute used to match nodes to rows.
If not provided, node IDs are used. Default is None.
:type left_index: Optional[str], optional
@@ -371,52 +1277,1048 @@ def join(
column_dictionaries = df.to_dict()
+ # frm: TODO: Code: Implement graph.join() for RX
+ #
+ # This is low priority given that current suggested coding
+ # strategy of creating the graph using NX and then letting
+ # GerryChain convert it automatically to RX. In this scenario
+ # any joins would happen to the NX-based graph only.
+
+ if not self.is_nx_graph():
+ raise TypeError("Graph passed to join() is not a networkx graph")
+ nx_graph = self._nx_graph
+
if left_index is not None:
- ids_to_index = networkx.get_node_attributes(self, left_index)
+ ids_to_index = networkx.get_node_attributes(nx_graph, left_index)
else:
# When the left_index is node ID, the matching is just
# a redundant {node: node} dictionary
ids_to_index = dict(zip(self.nodes, self.nodes))
node_attributes = {
- node_id: {
- column: values[index] for column, values in column_dictionaries.items()
- }
+ node_id: {column: values[index] for column, values in column_dictionaries.items()}
for node_id, index in ids_to_index.items()
}
- networkx.set_node_attributes(self, node_attributes)
+ networkx.set_node_attributes(nx_graph, node_attributes)
@property
- def islands(self) -> Set:
+ def islands(self) -> set[Any]:
"""
- :returns: The set of degree-0 nodes.
- :rtype: Set
+ Return a set of all node_ids that are not connected via an
+ edge to any other node in the graph - that is, nodes with
+ degree = 0
+
+ :returns: A set of all node_ids for nodes of degree 0
+ :rtype: set[Any]
"""
- return set(node for node in self if self.degree[node] == 0)
+ # Return all nodes of degree 0 (those not connected in an edge to another node)
+ return set(node_id for node_id in self.node_indices if self.degree(node_id) == 0)
+
+ def is_directed(self) -> bool:
+ # frm TODO: Code: Delete this code: graph.is_directed() once convinced it is safe to
+ # do so...
+ #
+ # I added it because code in contiguity.py
+ # called nx.is_connected() which eventually called is_directed()
+ # assuming the graph was an nx_graph.
+ #
+ # Changing from return False to raising an exception just to make
+ # sure nobody uses it.
+
+ raise NotImplementedError("graph.is_directed() should not be used")
def warn_for_islands(self) -> None:
"""
- :returns: None
+ Issue a warning if there are any islands in the graph - that is,
+ if there are any nodes in the graph that are not connected to any
+ other node (degree = 0)
- :raises: UserWarning if the graph has any islands (degree-0 nodes).
+ :rtype: None
"""
islands = self.islands
if len(self.islands) > 0:
- warnings.warn(
- "Found islands (degree-0 nodes). Indices of islands: {}".format(islands)
- )
+ warnings.warn("Found islands (degree-0 nodes). Indices of islands: {}".format(islands))
def issue_warnings(self) -> None:
"""
- :returns: None
+ Issue any warnings concerning the content or structure
+ of the graph.
- :raises: UserWarning if the graph has any red flags (right now, only islands).
+ :rtype: None
"""
self.warn_for_islands()
+ def __len__(self) -> int:
+ """
+ Return the number of nodes in the graph
+
+ :rtype: int
+ """
+ return len(self.node_indices)
+
+ def __getattr__(self, __name: str) -> Any:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
-def add_boundary_perimeters(graph: Graph, geometries: pd.Series) -> None:
+        :returns: The named attribute of the underlying graph object
+        :rtype: Any
+ """
+ # frm: TODO: Code: Get rid of _getattr_ eventually - it is very dangerous...
+
+ # frm: Interesting bug lurking if __name is "nx_graph". This occurs when legacy code
+ # uses the default constructor, Graph(), and then references a built-in NX
+ # Graph method, such as my_graph.add_edges(). In this case the built-in NX
+ # Graph method is not defined, so __getattr__() is called to try to figure out
+ # what it could be. This triggers the call below to self.is_nx_graph(), which
+ # references self._nx_graph (which is undefined/None) which triggers another
+ # call to __getattr__() which is BAD...
+ #
+ # I think the solution is to not rely on testing whether nx_graph and rx_graph
+ # are None - but rather to have explicit is_nx_or_rx_graph data member which
+ # is set to one of "NX", "RX", "not_set".
+ #
+ # For now, I am just going to return None if __name is "_nx_graph" or "_rx_graph".
+ #
+ # Peter's comments from PR:
+ #
+ # Oh interesting; good catch! The flag approach seems like a good solution to me.
+ # It's very, very rare to use the default constructor, so I don't imagine that
+ # people will really run into this.
+
+ # frm: TODO: Code: Fix this hack (in __getattr__) - see comment above...
+ if (__name == "_nx_graph") or (__name == "_rx_graph"):
+ return None
+
+ # If attribute doesn't exist on this object, try
+ # its underlying graph object...
+ if self.is_rx_graph():
+ return object.__getattribute__(self._rx_graph, __name)
+ elif self.is_nx_graph():
+ return object.__getattribute__(self._nx_graph, __name)
+ else:
+ raise TypeError(
+ "Graph passed to '__gettattr__()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
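+    # A minimal sketch of the flag approach discussed above (the attribute
+    # name _graph_backend is hypothetical, not implemented):
+    #
+    #     backend = object.__getattribute__(self, "_graph_backend")
+    #     if backend == "NX":
+    #         return object.__getattribute__(self._nx_graph, __name)
+    #     elif backend == "RX":
+    #         return object.__getattribute__(self._rx_graph, __name)
+    #     raise AttributeError(__name)
+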
+ def __getitem__(self, __name: str) -> Any:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ # frm: TODO: Code: Does any of the code actually use __getitem__ ?
+ #
+ # It is a clever Python way to use square bracket
+ # notation to access something (anything) you want.
+ #
+ # In this case, it returns the NetworkX AtlasView
+ # of neighboring nodes - looks like a dictionary
+ # with a key of the neighbor node_id and a value
+ # with the neighboring node's data (another dict).
+ #
+ # I am guessing that it is only ever used to get
+ # a list of the neighbor node_ids, in which case
+ # it is functionally equivalent to self.neighbors().
+ #
+ # *sigh*
+ #
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ # frm TODO: Code: Decide if __getitem__() should work for RX
+ raise TypeError("Graph._getitem__() is not defined for a rustworkx graph")
+ elif self.is_nx_graph():
+ return self._nx_graph[__name]
+ else:
+ raise TypeError(
+ "Graph passed to '__getitem__()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def __iter__(self) -> Iterable[Any]:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ yield from self.node_indices
+
+ def subgraph(self, nodes: Iterable[Any]) -> "Graph":
+ """
+ Create a subgraph that contains the given nodes.
+
+ Note that creating a subgraph of an RustworkX (RX) graph
+ renumbers the nodes, so that a node that had node_id: 4
+ in the parent graph might have node_id: 2 in the subgraph.
+ This is a HUGE difference from the NX world where the
+ node_ids in a subgraph do not change from those in the
+ parent graph.
+
+ In order to make sense of the nodes in a subgraph in the
+ RX world, we need to maintain mappings from the node_ids
+ in the subgraph to the node_ids of the immediate parent
+ graph and to the "original" top-level graph that contains
+ all of the nodes. You will notice the creation of those
+ maps in the code below.
+
+ :param nodes: The nodes to be included in the subgraph
+ :type nodes: Iterable[Any]
+
+ :returns: A subgraph containing the given nodes.
+ :rtype: "Graph"
+ """
+
+ """
+ frm: RX Documentation:
+
+ Subgraphs are one of the biggest differences between NX and RX, because RX creates new
+ node_ids for the nodes in the subgraph, starting at 0. So, if you create a subgraph with
+ a list of nodes: [45, 46, 47] the nodes in the subgraph will be [0, 1, 2].
+
+ This creates problems for functions that operate on subgraphs and want to return results
+ involving node_ids to the caller. To solve this, we define a
+ _node_id_to_parent_node_id_map whenever we create a subgraph that will provide the node_id
+ in the parent for each node in the subgraph. For NX this is a no-op, and the
+ _node_id_to_parent_node_id_map is just an identity map - each node_id is
+ mapped to itself. For RX, however, we store the parent_node_id in the node's data before
+ creating the subgraph, and then in the subgraph, we use the parent's node_id to construct
+ a map from the subgraph node_id to the parent_node_id.
+
+ This means that any function that wants to return results involving node_ids can safely
+ just translate node_ids using the _node_id_to_parent_node_id_map, so that the results make
+ sense in the caller's context.
+
+ A note of caution: if the caller retains the subgraph after using it in a function call,
+ the caller should almost certainly not use the node_ids in the subgraph for ANYTHING.
+ It would be safest to reset the value of the subgraph to None after using it as an
+ argument to a function call.
+
+ Also, for both RX and NX, we set the _node_id_to_parent_node_id_map to be the identity map
+ for top-level graphs on the off chance that there is a function that takes both top-level
+ graphs and subgraphs as a parameter. This allows the function to just always do the node
+ translation. In the case of a top-level graph the translation will be a no-op, but it will
+ be correct.
+
+ Also, we set the _is_a_subgraph = True, so that we can detect whether a parameter passed
+ into a function is a top-level graph or not. This will allow us to debug the code to
+ determine if assumptions about a parameter always being a subgraph is accurate. It also
+ helps to educate future readers of the code that subgraphs are "interesting"...
+ """
+
+ self.verify_graph_is_valid()
+
+ new_subgraph = None
+
+ if self.is_nx_graph():
+ nx_subgraph = self._nx_graph.subgraph(nodes)
+ new_subgraph = self.from_networkx(nx_subgraph)
+ # for NX, the node_ids in subgraph are the same as in the parent graph
+ _node_id_to_parent_node_id_map = {node: node for node in nodes}
+ _node_id_to_original_nx_node_id_map = {node: node for node in nodes}
+ elif self.is_rx_graph():
+ if isinstance(nodes, frozenset) or isinstance(nodes, set):
+ nodes = list(nodes)
+
+ # For RX, the node_ids in the subgraph change, so we need a way to map subgraph node_ids
+ # into parent graph node_ids. To do so, we add the parent node_id into the node data
+ # so that in the subgraph we can find it and then create the map.
+ #
+ # Note that this works because the node_data dict is shared by the nodes in both the
+ # parent graph and the subgraph, so we can set the "parent" node_id in the parent before
+ # creating the subgraph, and that value will be available in the subgraph even though
+ # the subgraph will have a different node_id for the same node.
+ #
+ # This value is removed from the node_data below after creating the subgraph.
+ #
+ for node_id in nodes:
+ self.node_data(node_id)["parent_node_id"] = node_id
+
+ # It is also important for all RX graphs (subgraphs or top-level graphs) to have
+ # a mapping from RX node_id to the "original" NX node_id. However, we do not need
+ # to do what we do with the _node_id_to_parent_node_id_map and set the value of
+ # the "original" node_id now, because this value never changes for a node. It
+ # should already have been set for each node by the standard RX code that
+ # converts from NX to RX (which sets the "__networkx_node__" attribute to be
+ # the NX node_id). We just check to make sure that it is in fact set.
+ #
+ for node_id in nodes:
+ if "__networkx_node__" not in self.node_data(node_id):
+ raise Exception("subgraph: internal error: original_nx_node_id not set")
+
+ rx_subgraph = self._rx_graph.subgraph(nodes)
+ new_subgraph = self.from_rustworkx(rx_subgraph)
+
+ # frm: Create the map from subgraph node_id to parent graph node_id
+ _node_id_to_parent_node_id_map = {}
+ for subgraph_node_id in new_subgraph.node_indices:
+ _node_id_to_parent_node_id_map[subgraph_node_id] = new_subgraph.node_data(
+ subgraph_node_id
+ )["parent_node_id"]
+ # value no longer needed, so delete it
+ new_subgraph.node_data(subgraph_node_id).pop("parent_node_id")
+
+ # frm: Create the map from subgraph node_id to the original graph's node_id
+ _node_id_to_original_nx_node_id_map = {}
+ for subgraph_node_id in new_subgraph.node_indices:
+ _node_id_to_original_nx_node_id_map[subgraph_node_id] = new_subgraph.node_data(
+ subgraph_node_id
+ )["__networkx_node__"]
+ else:
+ raise TypeError(
+ "Graph passed to 'subgraph()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ new_subgraph._is_a_subgraph = True
+ new_subgraph._node_id_to_parent_node_id_map = _node_id_to_parent_node_id_map
+ new_subgraph._node_id_to_original_nx_node_id_map = _node_id_to_original_nx_node_id_map
+
+ return new_subgraph
+
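+    # Illustrative sketch (node_ids are examples only): an RX subgraph
+    # renumbers its nodes from 0, so results must be translated back
+    # through the parent map:
+    #
+    #     sub = rx_based.subgraph({45, 46, 47})   # subgraph node_ids are 0, 1, 2
+    #     parents = sub.translate_subgraph_node_ids_for_set_of_nodes({0, 2})
+    #     # parents is a subset of {45, 46, 47}
+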
+ # frm: TODO: Refactoring: Create abstract type name for "Flip" and "Flip_Dict".
+ #
+ # This is cosmetic, but it would (IMHO) make the code easier to understand, and it
+ # would provide a logical place to define WTF a flip is...
+
+ def translate_subgraph_node_ids_for_flips(self, flips: dict[Any, int]) -> dict[Any, int]:
+ """
+ Translate the given flips so that the subgraph node_ids in the flips
+ have been translated to the appropriate node_ids in the
+ parent graph.
+
+ The flips parameter is a dict mapping node_ids to parts (districts).
+
+ This routine is used when a computation that creates flips is made
+ on a subgraph, but those flips want to be translated into the context
+ of the parent graph at the end of the computation.
+
+ For more details, refer to the larger comment on subgraphs...
+
+ :param flips: A dict containing "flips" which associate a node with
+ a new part in a partition (a "part" is the same as a district in
+ common parlance).
+ :type flips: dict[Any, int]
+
+ :returns: A dict containing "flips" that have been translated to have
+ node_ids appropriate for the parent graph
+ :rtype: dict[Any, int]
+ """
+
+ # frm: TODO: Documentation: Write an overall comment on subgraphs and node_id maps
+
+ translated_flips = {}
+ for subgraph_node_id, part in flips.items():
+ parent_node_id = self._node_id_to_parent_node_id_map[subgraph_node_id]
+ translated_flips[parent_node_id] = part
+
+ return translated_flips
+
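+    # Illustrative sketch (ids and parts are examples only): flips computed
+    # on a subgraph are re-keyed to parent node_ids before being applied:
+    #
+    #     flips = {0: 1, 2: 2}                    # keyed by subgraph node_ids
+    #     parent_flips = sub.translate_subgraph_node_ids_for_flips(flips)
+    #     # parent_flips is keyed by the parent graph's node_ids
+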
+ def translate_subgraph_node_ids_for_set_of_nodes(self, set_of_nodes: set[Any]) -> set[Any]:
+ """
+ Translate the given set_of_nodes to have the appropriate
+ node_ids for the parent graph.
+
+ This routine is used when a computation that creates a set of nodes is made
+ on a subgraph, but those nodes need to be translated into the context
+ of the parent graph at the end of the computation.
+
+ For more details, refer to the larger comment on subgraphs...
+
+ :param set_of_nodes: A set of node_ids in a subgraph
+ :type set_of_nodes: set[Any]
+
+ :returns: A set of node_ids that have been translated to have
+ the node_ids appropriate for the parent graph
+ :rtype: set[Any]
+ """
+ # This routine replaces the node_ids of the subgraph with the node_ids
+ # for the same nodes in the parent graph. It is used when a computation
+ # is made on a subgraph but the resulting set of nodes needs to contain
+ # the node_ids appropriate for the parent graph.
+ translated_set_of_nodes = set()
+ for node_id in set_of_nodes:
+ translated_set_of_nodes.add(self._node_id_to_parent_node_id_map[node_id])
+ return translated_set_of_nodes
+
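+ # frm: A small sketch of the two translation helpers above. The node_ids
+ # used here are hypothetical; the maps are normally built by subgraph():
+ #
+ #     # Suppose subgraph node 0 maps to parent node 17, and subgraph
+ #     # node 1 maps to parent node 42, i.e.:
+ #     #     subgraph._node_id_to_parent_node_id_map == {0: 17, 1: 42}
+ #     subgraph.translate_subgraph_node_ids_for_flips({0: 3, 1: 4})
+ #     # => {17: 3, 42: 4}
+ #     subgraph.translate_subgraph_node_ids_for_set_of_nodes({0, 1})
+ #     # => {17, 42}
+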
+ def generic_bfs_edges(
+ self, source, neighbors=None, depth_limit=None
+ ) -> Generator[tuple[Any, Any], None, None]:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ # frm: Code copied from GitHub:
+ #
+ # https://github.com/networkx/networkx/blob/main/networkx/algorithms/traversal/breadth_first_search.py
+ #
+ # Code was not modified - it worked as written for both rx.PyGraph and a graph.Graph object
+ # with an RX graph embedded in it...
+
+ """Iterate over edges in a breadth-first search.
+
+ The breadth-first search begins at `source` and enqueues the
+ neighbors of newly visited nodes specified by the `neighbors`
+ function.
+
+ Parameters
+ ----------
+ G : RustworkX.PyGraph object (not a NetworkX graph)
+
+ source : node
+ Starting node for the breadth-first search; this function
+ iterates over only those edges in the component reachable from
+ this node.
+
+ neighbors : function
+ A function that takes a newly visited node of the graph as input
+ and returns an *iterator* (not just a list) of nodes that are
+ neighbors of that node with custom ordering. If not specified, this is
+ just the ``G.neighbors`` method, but in general it can be any function
+ that returns an iterator over some or all of the neighbors of a
+ given node, in any order.
+
+ depth_limit : int, optional(default=len(G))
+ Specify the maximum search depth.
+
+ Yields
+ ------
+ edge
+ Edges in the breadth-first search starting from `source`.
+
+ Examples
+ --------
+ >>> G = nx.path_graph(7)
+ >>> list(nx.generic_bfs_edges(G, source=0))
+ [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
+ >>> list(nx.generic_bfs_edges(G, source=2))
+ [(2, 1), (2, 3), (1, 0), (3, 4), (4, 5), (5, 6)]
+ >>> list(nx.generic_bfs_edges(G, source=2, depth_limit=2))
+ [(2, 1), (2, 3), (1, 0), (3, 4)]
+
+ The `neighbors` param can be used to specify the visitation order of each
+ node's neighbors generically. In the following example, we modify the default
+ neighbor to return *odd* nodes first:
+
+ >>> def odd_first(n):
+ ... return sorted(G.neighbors(n), key=lambda x: x % 2, reverse=True)
+
+ >>> G = nx.star_graph(5)
+ >>> list(nx.generic_bfs_edges(G, source=0)) # Default neighbor ordering
+ [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)]
+ >>> list(nx.generic_bfs_edges(G, source=0, neighbors=odd_first))
+ [(0, 1), (0, 3), (0, 5), (0, 2), (0, 4)]
+
+ Notes
+ -----
+ This implementation is from `PADS`_, which was in the public domain
+ when it was first accessed in July, 2004. The modifications
+ to allow depth limits are based on the Wikipedia article
+ "`Depth-limited-search`_".
+
+ .. _PADS: http://www.ics.uci.edu/~eppstein/PADS/BFS.py
+ .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search
+ """
+ # frm: These two if-stmts work for both rx.PyGraph and gerrychain.Graph with RX inside
+ if neighbors is None:
+ neighbors = self.neighbors
+ if depth_limit is None:
+ depth_limit = len(self)
+
+ seen = {source}
+ n = len(self)
+ depth = 0
+ next_parents_children = [(source, neighbors(source))]
+ while next_parents_children and depth < depth_limit:
+ this_parents_children = next_parents_children
+ next_parents_children = []
+ for parent, children in this_parents_children:
+ for child in children:
+ # frm: avoid cycles - don't process a child twice...
+ if child not in seen:
+ seen.add(child)
+ # frm: add this node's children to list to be processed later...
+ next_parents_children.append((child, neighbors(child)))
+ yield (parent, child)
+ if len(seen) == n:
+ return
+ depth += 1
+
+ # frm: TODO: Testing: Add tests for all of the new routines I have added...
+
+ def generic_bfs_successors_generator(
+ self, root_node_id: Any
+ ) -> Generator[tuple[Any, Any], None, None]:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ # frm: Generate in sequence a tuple for the parent (node_id) and
+ # the children of that node (list of node_ids).
+ parent = root_node_id
+ children = []
+ for p, c in self.generic_bfs_edges(root_node_id):
+ # frm: parent-child pairs appear ordered by their parent, so
+ # we can collect all of the children for a node by just
+ # iterating through pairs until the parent changes.
+ if p == parent:
+ children.append(c)
+ continue
+ yield (parent, children)
+ # new parent, so reset parent and children variables to
+ # be the new parent (p) and a new children list containing
+ # this first child (c), and continue looping
+ children = [c]
+ parent = p
+ yield (parent, children)
+
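+ # frm: Sketch of the generator's output, assuming a hypothetical path
+ # graph with node_ids 0-1-2-3:
+ #
+ #     list(g.generic_bfs_successors_generator(0))
+ #     # => [(0, [1]), (1, [2]), (2, [3])]
+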
+ def generic_bfs_successors(self, root_node_id: Any) -> dict[Any, Any]:
+ """
+ Return a dict mapping each parent node_id to the list of its
+ children, as discovered by a breadth-first search starting at
+ ``root_node_id``.
+
+ :param root_node_id: The node_id at which to start the search
+ :type root_node_id: Any
+
+ :returns: A dict mapping parent node_ids to lists of child node_ids
+ :rtype: dict[Any, Any]
+ """
+ return dict(self.generic_bfs_successors_generator(root_node_id))
+
+ def generic_bfs_predecessors(self, root_node_id: Any) -> dict[Any, Any]:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ # frm: Note: We had to implement our own, because the built-in RX version only
+ # works for directed graphs.
+ predecessors = []
+ for s, t in self.generic_bfs_edges(root_node_id):
+ predecessors.append((t, s))
+ return dict(predecessors)
+
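+ # frm: Sketch, same hypothetical path graph 0-1-2-3: each non-root node
+ # maps to its single BFS parent:
+ #
+ #     g.generic_bfs_predecessors(0)
+ #     # => {1: 0, 2: 1, 3: 2}
+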
+ def predecessors(self, root_node_id: Any) -> dict[Any, Any]:
+ """
+ Return a dict mapping each node_id (other than the root) to its
+ single parent node_id in a breadth-first search starting at
+ ``root_node_id``. See the longer discussion below.
+
+ :param root_node_id: The node_id at which to start the search
+ :type root_node_id: Any
+
+ :returns: A dict mapping node_ids to their parent node_ids
+ :rtype: dict[Any, Any]
+ """
+
+ """
+ frm: It took me a while to grok what predecessors() and successors()
+ were all about. In the end, it was simple - they are just the
+ parents and the children of a tree that "starts" at the given root
+ node.
+
+ What took me a while to understand is that this effectively
+ converts an undirected cyclic graph into a DAG. What is clever is
+ that as soon as it detects a cycle it stops traversing the graph.
+ The other thing that is clever is that the DAG that is created
+ either starts at the top or the bottom. For successors(), the
+ DAG starts at the top, so that the argument to successors() is
+ the root of the tree. However, in the case of predecessors()
+ the argument to predecessors() is a leaf node, and the "tree"
+ can have multiple "roots".
+
+ In both cases, you can ask what the associated parent or
+ children are of any node in the graph. If you ask for the
+ successors() you will get a list of the children nodes.
+ If you ask for the predecessors() you will get the single
+ parent node.
+
+ I think that the successors() graph is deterministic (except
+ for the order of the child nodes), meaning that for a given
+ graph no matter what order you created nodes and added edges,
+ you will get the same set of children for a given node.
+ However, for predecessors(), there are many different
+ DAGs that might be created depending on which edge the
+ algorithm decides is the single parent.
+
+ All of this is interesting, but I have not yet spent the
+ time to figure out why it matters in the code.
+
+ TODO: Code: predecessors(): Decide if it makes sense to have different implementations
+ for NX and RX. The code below has the original definition
+ from the pre-RX codebase, but the code for RX will work
+ for NX too - so I think that there is no good reason to
+ have different code for NX. Maybe no harm, but on the other
+ hand, it seems like a needless difference and hence more
+ complexity...
+
+ TODO: Performance: see if the performance of the built-in NX
+ version is significantly better than the generic one.
+ """
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ return self.generic_bfs_predecessors(root_node_id)
+ elif self.is_nx_graph():
+ return {a: b for a, b in networkx.bfs_predecessors(self._nx_graph, root_node_id)}
+ else:
+ raise TypeError(
+ "Graph passed to 'predecessors()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def successors(self, root_node_id: Any) -> dict[Any, Any]:
+ """
+ Return a dict mapping each parent node_id to the list of its
+ children in a breadth-first search starting at ``root_node_id``.
+ See the longer discussion in predecessors() above.
+
+ :param root_node_id: The node_id at which to start the search
+ :type root_node_id: Any
+
+ :returns: A dict mapping parent node_ids to lists of child node_ids
+ :rtype: dict[Any, Any]
+ """
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ return self.generic_bfs_successors(root_node_id)
+ elif self.is_nx_graph():
+ return {a: b for a, b in networkx.bfs_successors(self._nx_graph, root_node_id)}
+ else:
+ raise TypeError(
+ "Graph passed to 'successors()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
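+ # frm: Sketch contrasting the two, assuming a hypothetical star graph
+ # with center 0 and leaves 1, 2, 3:
+ #
+ #     g.successors(0)    # => {0: [1, 2, 3]}
+ #     g.predecessors(1)  # => {0: 1, 2: 0, 3: 0}
+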
+ def neighbors(self, node_id: Any) -> list[Any]:
+ """
+ Return a list of the node_ids of the nodes that are neighbors of
+ the given node - that is, all of the nodes that are directly
+ connected to the given node by an edge.
+
+ :param node_id: The ID of a node
+ :type node_id: Any
+
+ :returns: A list of neighbor node_ids
+ :rtype: list[Any]
+ """
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ return list(self._rx_graph.neighbors(node_id))
+ elif self.is_nx_graph():
+ return list(self._nx_graph.neighbors(node_id))
+ else:
+ raise TypeError(
+ "Graph passed to 'neighbors()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def degree(self, node_id: Any) -> int:
+ """
+ Return the degree of the given node, that is, the number
+ of other nodes directly connected to the given node.
+
+ :param node_id: The ID of a node
+ :type node_id: Any
+
+ :returns: Number of nodes directly connected to the given node
+ :rtype: int
+ """
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ return self._rx_graph.degree(node_id)
+ elif self.is_nx_graph():
+ return self._nx_graph.degree(node_id)
+ else:
+ raise TypeError(
+ "Graph passed to 'degree()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ def node_data(self, node_id: Any) -> dict[Any, Any]:
+ """
+ Return the data dictionary that contains the given node's data.
+
+ As documented elsewhere, in GerryChain code before the conversion
+ to RustworkX, users could access node data using the syntax:
+
+ graph.nodes[node_id][attribute_name]
+
+ This was because a GerryChain Graph object in that codebase was a
+ subclass of NetworkX.Graph, and NetworkX was clever and implemented
+ dict-like behavior for the syntax graph.nodes[]...
+
+ This Python cleverness was not carried over to the RustworkX
+ implementation, so in the current GerryChain Graph implementation
+ users need to access node data using the syntax:
+
+ graph.node_data(node_id)[attribute_name]
+
+ :param node_id: The ID of a node
+ :type node_id: Any
+
+ :returns: Data dictionary containing the given node's data.
+ :rtype: dict[Any, Any]
+ """
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ data_dict = self._rx_graph[node_id]
+ elif self.is_nx_graph():
+ data_dict = self._nx_graph.nodes[node_id]
+ else:
+ raise TypeError(
+ "Graph passed to 'node_data()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ if not isinstance(data_dict, dict):
+ raise TypeError("graph.node_data(): data for node is not a dict")
+
+ return data_dict
+
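+ # frm: Migration sketch for node data access (the attribute name
+ # "population" is hypothetical):
+ #
+ #     # pre-RustworkX:  graph.nodes[node_id]["population"]
+ #     # now:            graph.node_data(node_id)["population"]
+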
+ def edge_data(self, edge_id: Any) -> dict[Any, Any]:
+ """
+ Return the data dictionary that contains the data for the given edge.
+
+ Note that in NetworkX an edge_id can be almost anything, for instance,
+ a string or even a tuple. However, in RustworkX, an edge_id is
+ an integer. This code handles both kinds of edge_ids - hence the
+ type, Any.
+
+ :param edge_id: The ID of the edge
+ :type edge_id: Any
+
+ :returns: The data dictionary for the given edge's data
+ :rtype: dict[Any, Any]
+ """
+
+ self.verify_graph_is_valid()
+
+ if self.is_rx_graph():
+ data_dict = self._rx_graph.get_edge_data_by_index(edge_id)
+ elif self.is_nx_graph():
+ data_dict = self._nx_graph.edges[edge_id]
+ else:
+ raise TypeError(
+ "Graph passed to 'edge_data()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ # Sanity check - RX edges do not need to have a data dict for edge data
+ #
+ # A GerryChain Graph object should always be constructed with a data dict
+ # for edge data, but it doesn't hurt to check.
+ if not isinstance(data_dict, dict):
+ raise TypeError("graph.edge(): data for edge is not a dict")
+
+ return data_dict
+
+ # frm: TODO: Documentation: Note: I added the laplacian_matrix routines as methods of the Graph
+ # class because they are only ever used on Graph objects. It
+ # bloats the Graph class, but it still seems like the best
+ # option.
+ #
+ # A goal is to encapsulate ALL NX dependencies in this file.
+
+ def laplacian_matrix(self) -> scipy.sparse.csr_array:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ # A local "gc" (as in GerryChain) version of the laplacian matrix
+
+ # frm: TODO: Code: laplacian_matrix(): should NX and RX return same type (float vs. int)?
+ #
+ # The NX version returns a matrix of integer values while the
+ # RX version returns a matrix of floating point values. I
+ # think the reason is that the RX.adjacency_matrix() call
+ # returns an array of floats.
+ #
+ # Since the laplacian matrix is used for further numeric
+ # processing, I don't think this matters, but I should
+ # check to be 100% certain.
+
+ if self.is_rx_graph():
+ rx_graph = self._rx_graph
+ # 1. Get the adjacency matrix
+ adj_matrix = rustworkx.adjacency_matrix(rx_graph)
+ # 2. Calculate the degree matrix (simplified for this example)
+ degree_matrix = numpy.diag([rx_graph.degree(node) for node in rx_graph.node_indices()])
+ # 3. Calculate the Laplacian matrix
+ np_laplacian_matrix = degree_matrix - adj_matrix
+ # 4. Convert the NumPy array to a scipy.sparse array
+ laplacian_matrix = scipy.sparse.csr_array(np_laplacian_matrix)
+ elif self.is_nx_graph():
+ nx_graph = self._nx_graph
+ laplacian_matrix = networkx.laplacian_matrix(nx_graph)
+ else:
+ raise TypeError(
+ "Graph passed into laplacian_matrix() is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ return laplacian_matrix
+
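+ # frm: Quick sanity sketch (not a test): for a path graph 0-1-2 with
+ # degrees (1, 2, 1), laplacian_matrix().toarray() should be
+ #
+ #     [[ 1, -1,  0],
+ #      [-1,  2, -1],
+ #      [ 0, -1,  1]]
+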
+ def normalized_laplacian_matrix(self) -> scipy.sparse.dia_array:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+
+ def create_scipy_sparse_array_from_rx_graph(
+ rx_graph: rustworkx.PyGraph,
+ ) -> scipy.sparse.coo_matrix:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
+ num_nodes = rx_graph.num_nodes()
+
+ rows = []
+ cols = []
+ data = []
+
+ for u, v in rx_graph.edge_list():
+ # The graph is undirected, so the adjacency matrix must be
+ # symmetric; edge_list() yields each edge only once, so record
+ # both (u, v) and (v, u).
+ rows.extend([u, v])
+ cols.extend([v, u])
+ data.extend([1, 1])  # simple adjacency matrix, so just 1 not weight attribute
+
+ sparse_array = scipy.sparse.coo_matrix(
+ (data, (rows, cols)), shape=(num_nodes, num_nodes)
+ )
+
+ return sparse_array
+
+ if self.is_rx_graph():
+ rx_graph = self._rx_graph
+ """
+ The following is code copied from the networkx linalg file, laplacianmatrix.py
+ for normalized_laplacian_matrix. Below this code has been modified to work for
+ gerrychain (with an RX-based Graph object)
+
+ import numpy as np
+ import scipy as sp
+
+ if nodelist is None:
+ nodelist = list(G)
+ A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr")
+ n, _ = A.shape
+ diags = A.sum(axis=1)
+ D = sp.sparse.dia_array((diags, 0), shape=(n, n)).tocsr()
+ L = D - A
+ with np.errstate(divide="ignore"):
+ diags_sqrt = 1.0 / np.sqrt(diags)
+ diags_sqrt[np.isinf(diags_sqrt)] = 0
+ DH = sp.sparse.dia_array((diags_sqrt, 0), shape=(n, n)).tocsr()
+ return DH @ (L @ DH)
+
+ """
+
+ # frm: TODO: Get someone to validate that this in fact does the right thing.
+ #
+ # The one test, test_proposal_returns_a_partition[spectral_recom], in test_proposals.py
+ # that uses normalized_laplacian_matrix() now passes, but it is for a small 6x6 graph
+ # and hence is not a real world test...
+ #
+
+ A = create_scipy_sparse_array_from_rx_graph(rx_graph)
+ n, _ = A.shape # shape() => dimensions of the array (rows, cols), so n = num_rows
+ diags = A.sum(axis=1) # sum of values in each row => column vector
+ diags = diags.T # convert to a row vector / 1D array
+ D = scipy.sparse.dia_array((diags, [0]), shape=(n, n)).tocsr()
+ L = D - A
+ with numpy.errstate(divide="ignore"):
+ diags_sqrt = 1.0 / numpy.sqrt(diags)
+ diags_sqrt[numpy.isinf(diags_sqrt)] = 0
+ DH = scipy.sparse.dia_array((diags_sqrt, 0), shape=(n, n)).tocsr()
+ normalized_laplacian = DH @ (L @ DH)
+ return normalized_laplacian
+
+ elif self.is_nx_graph():
+ nx_graph = self._nx_graph
+ normalized_laplacian = networkx.normalized_laplacian_matrix(nx_graph)
+ else:
+ raise TypeError(
+ "Graph passed into normalized_laplacian_matrix() is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ return normalized_laplacian
+
+ def subgraphs_for_connected_components(self) -> list["Graph"]:
+ """
+ Create and return a list of subgraphs, one for each connected
+ component of the graph.
+
+ Note that a connected graph is one in which there is a path
+ from every node in the graph to every other node in the
+ graph.
+
+ Note also that each of the subgraphs returned is a maximal
+ connected subgraph (a connected component): no strictly larger
+ connected subgraph of the graph contains it.
+
+ :returns: A list of subgraphs, one per connected component
+ :rtype: list["Graph"]
+ """
+
+ if self.is_rx_graph():
+ rx_graph = self.get_rx_graph()
+ subgraphs = [self.subgraph(nodes) for nodes in rustworkx.connected_components(rx_graph)]
+ elif self.is_nx_graph():
+ nx_graph = self.get_nx_graph()
+ subgraphs = [self.subgraph(nodes) for nodes in networkx.connected_components(nx_graph)]
+ else:
+ raise TypeError(
+ "Graph passed to 'subgraphs_for_connected_components()' is "
+ "neither a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ return subgraphs
+
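+ # frm: Sketch: for a graph whose "islands" are {0, 1} and {2} (hypothetical
+ # node_ids), this returns two subgraphs:
+ #
+ #     [list(sg.node_indices) for sg in g.subgraphs_for_connected_components()]
+ #     # => something like [[0, 1], [2]]
+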
+ def num_connected_components(self) -> int:
+ """
+ Return the number of connected components.
+
+ Note: A connected component is a maximal subgraph
+ where every vertex is reachable from every other vertex in
+ that same subgraph. In a graph that is not fully connected,
+ connected components are the separate, distinct "islands" of
+ connected nodes. Every node in a graph belongs to exactly
+ one connected component.
+
+ :returns: The number of connected components
+ :rtype: int
+ """
+
+ # frm: TODO: Performance: num_connected_components(): do both NX and RX have builtins
+ # for this?
+ #
+ # NetworkX and RustworkX both have a routine number_connected_components().
+ # I am guessing that it is more efficient to call these than it is
+ # to construct the connected components and then determine how many
+ # of them there are.
+ #
+ # So - should be a simple issue of trying it and running tests, but
+ # I will do that another day...
+
+ if self.is_rx_graph():
+ rx_graph = self.get_rx_graph()
+ connected_components = rustworkx.connected_components(rx_graph)
+ elif self.is_nx_graph():
+ nx_graph = self.get_nx_graph()
+ connected_components = list(networkx.connected_components(nx_graph))
+ else:
+ raise TypeError(
+ "Graph passed to 'num_connected_components()' is neither "
+ "a networkx-based graph nor a rustworkx-based graph"
+ )
+
+ num_cc = len(connected_components)
+ return num_cc
+
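+ # frm: Sketch of the builtin alternative mentioned in the TODO above; both
+ # libraries expose a direct count, which avoids materializing the components:
+ #
+ #     rustworkx.number_connected_components(self._rx_graph)
+ #     networkx.number_connected_components(self._nx_graph)
+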
+ def is_a_tree(self) -> bool:
+ """
+ Return whether the current graph is a tree - meaning that
+ it is connected and that it has no cycles.
+
+ :returns: Whether the current graph is a tree
+ :rtype: bool
+ """
+
+ # frm: TODO: Refactor: is_a_tree() is only called in a test (test_tree.py) - delete it?
+ #
+ # Talk to Peter to see if there is any reason to keep this function. Does anyone
+ # use it externally?
+ #
+ # On the other hand, perhaps it is OK to keep it even if it is only ever used in a test...
+
+ if self.is_rx_graph():
+ rx_graph = self.get_rx_graph()
+ num_nodes = rx_graph.num_nodes()
+ num_edges = rx_graph.num_edges()
+
+ # Condition 1: Check if the number of edges is one less than the number of nodes
+ if num_edges != num_nodes - 1:
+ return False
+
+ # Condition 2: Check for connectivity (and implicitly, acyclicity if E = V-1)
+ # A graph with V-1 edges and no cycles must be connected.
+ # A graph with V-1 edges and connected must be acyclic.
+
+ # We can check connectivity by ensuring there's only one connected component.
+ connected_components = rustworkx.connected_components(rx_graph)
+ if len(connected_components) != 1:
+ return False
+
+ return True
+ elif self.is_nx_graph():
+ nx_graph = self.get_nx_graph()
+ return networkx.is_tree(nx_graph)
+ else:
+ raise TypeError(
+ "Graph passed to 'is_a_tree()' is neither a "
+ "networkx-based graph nor a rustworkx-based graph"
+ )
+
+
+def add_boundary_perimeters(nx_graph: networkx.Graph, geometries: pd.Series) -> None:
"""
Add shared perimeter between nodes and the total geometry boundary.
@@ -428,22 +2330,31 @@ def add_boundary_perimeters(graph: Graph, geometries: pd.Series) -> None:
:returns: The updated graph.
:rtype: Graph
"""
- from shapely.ops import unary_union
- from shapely.prepared import prep
+
+ # frm: TODO: add_boundary_perimeters(): Think about whether it is reasonable to require this
+ # to work on a NetworkX.Graph object.
+
+ # frm: The original code operated on the Graph object which was a subclass of
+ # NetworkX.Graph. I have changed it to operate on a NetworkX.Graph object
+ # with the understanding that callers will reach down into a Graph object
+ # and pass in the inner nx_graph data member.
+
+ if not isinstance(nx_graph, networkx.Graph):
+ raise TypeError("Graph passed into add_boundary_perimeters() is not a networkx graph")
prepared_boundary = prep(unary_union(geometries).boundary)
boundary_nodes = geometries.boundary.apply(prepared_boundary.intersects)
- for node in graph:
- graph.nodes[node]["boundary_node"] = bool(boundary_nodes[node])
+ for node in nx_graph:
+ nx_graph.nodes[node]["boundary_node"] = bool(boundary_nodes[node])
if boundary_nodes[node]:
total_perimeter = geometries[node].boundary.length
shared_perimeter = sum(
- neighbor_data["shared_perim"] for neighbor_data in graph[node].values()
+ neighbor_data["shared_perim"] for neighbor_data in nx_graph[node].values()
)
boundary_perimeter = total_perimeter - shared_perimeter
- graph.nodes[node]["boundary_perim"] = boundary_perimeter
+ nx_graph.nodes[node]["boundary_perim"] = boundary_perimeter
def check_dataframe(df: pd.DataFrame) -> None:
@@ -524,6 +2435,15 @@ class FrozenGraph:
The class uses `__slots__` for improved memory efficiency.
"""
+ # frm: TODO: Code: Rename the internal data member, "graph", to be something else.
+ # The reason is that a NetworkX.Graph object already has an internal
+ # data member named, "graph", which is just a dict for the data
+ # associated with the Networkx.Graph object.
+ #
+ # So, naming the frozen graph something like
+ # _frozen_graph would make it easier for a future
+ # reader of the code to follow...
+
__slots__ = ["graph", "size"]
def __init__(self, graph: Graph) -> None:
@@ -535,30 +2455,85 @@ def __init__(self, graph: Graph) -> None:
:returns: None
"""
- self.graph = networkx.classes.function.freeze(graph)
- self.graph.join = frozen
- self.graph.add_data = frozen
- self.size = len(self.graph)
+ # frm: Original code follows:
+ #
+ # self.graph = networkx.classes.function.freeze(graph)
+ #
+ # # frm: frozen is just a function that raises an exception if called...
+ # self.graph.join = frozen
+ # self.graph.add_data = frozen
+ #
+ # self.size = len(self.graph)
+
+ # frm: TODO: Code: Add logic to FrozenGraph so that it is indeed "frozen" (for both NX
+ # and RX)
+ #
+ # I think this just means redefining those methods that change the graph
+ # to return an error / exception if called.
+
+ self.graph = graph
+ self.size = len(self.graph.node_indices)
def __len__(self) -> int:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
return self.size
def __getattribute__(self, __name: str) -> Any:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
try:
return object.__getattribute__(self, __name)
except AttributeError:
- return object.__getattribute__(self.graph, __name)
+ # delegate getting the attribute to the graph data member
+ return self.graph.__getattribute__(__name)
def __getitem__(self, __name: str) -> Any:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
return self.graph[__name]
def __iter__(self) -> Iterable[Any]:
+ """
+
+
+ :param : ...text...
+ ...more text...
+ :type :
+
+ :returns: ...text...
+ :rtype:
+ """
yield from self.node_indices
@functools.lru_cache(16384)
- def neighbors(self, n: Any) -> Tuple[Any, ...]:
- return tuple(self.graph.neighbors(n))
+ def neighbors(self, n: Any) -> tuple[Any, ...]:
+ return self.graph.neighbors(n)
@functools.cached_property
def node_indices(self) -> Iterable[Any]:
@@ -572,9 +2547,5 @@ def edge_indices(self) -> Iterable[Any]:
def degree(self, n: Any) -> int:
return self.graph.degree(n)
- @functools.lru_cache(65536)
- def lookup(self, node: Any, field: str) -> Any:
- return self.graph.nodes[node][field]
-
def subgraph(self, nodes: Iterable[Any]) -> "FrozenGraph":
return FrozenGraph(self.graph.subgraph(nodes))
diff --git a/gerrychain/grid.py b/gerrychain/grid.py
index b635c807..1792801b 100644
--- a/gerrychain/grid.py
+++ b/gerrychain/grid.py
@@ -13,9 +13,13 @@
"""
import math
+from typing import Any, Callable, Dict, Optional, Tuple
+
import networkx
-from gerrychain.partition import Partition
+
from gerrychain.graph import Graph
+from gerrychain.metrics import polsby_popper
+from gerrychain.partition import Partition
from gerrychain.updaters import (
Tally,
boundary_nodes,
@@ -25,8 +29,14 @@
interior_boundaries,
perimeter,
)
-from gerrychain.metrics import polsby_popper
-from typing import Callable, Dict, Optional, Tuple, Any
+
+# frm TODO: Documentation: Clarify what purpose grid.py serves.
+#
+# It is a convenience module to help users create toy graphs. It leverages
+# NX to create graphs, but it returns new Graph objects. So, legacy user
+# code will need to be at least reviewed to make sure that it properly
+# copes with new Graph objects.
+#
class Grid(Partition):
@@ -62,6 +72,20 @@ def __init__(
assignment: Optional[Dict] = None,
updaters: Optional[Dict[str, Callable]] = None,
parent: Optional["Grid"] = None,
+ # frm: ???: TODO: This code indicates that flips are a dict of tuple: int which would be
+ # correct for edge flips, but not for node flips. Need to check again
+ # to see if this is correct. Note that flips is used in the constructor
+ # so it should fall through to Partition._from_parent()...
+ #
+ # OK - I think that this is a bug. Parition._from_parent() assumes
+ # that flips are a mapping from node to partition not tuple/edge to
+ # partition. I checked ALL of the code and the constructor for Grid is
+ # never passed in a flips parameter, so there is no example to
+ # check / verify, but it sure looks and smells like a bug.
+ #
+ # The fix would be to just change Dict[Tuple[int, int], int] to be
+ # Dict[int, int]
+ #
flips: Optional[Dict[Tuple[int, int], int]] = None,
) -> None:
"""
@@ -95,14 +119,18 @@ def __init__(
:raises Exception: If neither dimensions nor parent is provided.
"""
+
+ # Note that Grid graphs have node_ids that are tuples, not integers.
+
if dimensions:
self.dimensions = dimensions
- graph = Graph.from_networkx(create_grid_graph(dimensions, with_diagonals))
+ graph = Graph.from_networkx(_create_grid_nx_graph(dimensions, with_diagonals))
if not assignment:
thresholds = tuple(math.floor(n / 2) for n in self.dimensions)
assignment = {
- node: color_quadrants(node, thresholds) for node in graph.nodes # type: ignore
+ node_id: color_quadrants(node_id, thresholds) # type: ignore
+ for node_id in graph.node_indices
}
if not updaters:
@@ -139,7 +167,23 @@ def as_list_of_lists(self):
return [[self.assignment.mapping[(i, j)] for i in range(m)] for j in range(n)]
-def create_grid_graph(dimensions: Tuple[int, int], with_diagonals: bool) -> Graph:
+# frm: Documentation: Document what grid.py is intended to be used for
+#
+# I will need to do some research, but my guess is that there are two use
+# cases:
+#
+# 1) Testing - make it easy to create tests
+# 2) User Code - make it easy for users to play around.
+#
+# For #1, it is OK to have some routines return NX-Graph objects and some to return new Graph
+# objects, but that is probably confusing to users, so the todo list items are:
+#
+# * Decide whether to support returning NX-based Graphs sometimes and new Graphs others,
+# * Document whatever we decide
+#
+
+
+def _create_grid_nx_graph(dimensions: Tuple[int, ...], with_diagonals: bool) -> Graph:
"""
Creates a grid graph with the specified dimensions.
Optionally includes diagonal connections between nodes.
@@ -157,30 +201,41 @@ def create_grid_graph(dimensions: Tuple[int, int], with_diagonals: bool) -> Grap
if len(dimensions) != 2:
raise ValueError("Expected two dimensions.")
m, n = dimensions
- graph = networkx.generators.lattice.grid_2d_graph(m, n)
+ nx_graph = networkx.generators.lattice.grid_2d_graph(m, n)
- networkx.set_edge_attributes(graph, 1, "shared_perim")
+ networkx.set_edge_attributes(nx_graph, 1, "shared_perim")
if with_diagonals:
- nw_to_se = [
- ((i, j), (i + 1, j + 1)) for i in range(m - 1) for j in range(n - 1)
- ]
- sw_to_ne = [
- ((i, j + 1), (i + 1, j)) for i in range(m - 1) for j in range(n - 1)
- ]
+ nw_to_se = [((i, j), (i + 1, j + 1)) for i in range(m - 1) for j in range(n - 1)]
+ sw_to_ne = [((i, j + 1), (i + 1, j)) for i in range(m - 1) for j in range(n - 1)]
diagonal_edges = nw_to_se + sw_to_ne
- graph.add_edges_from(diagonal_edges)
+ # frm: TODO: Check that graph is an NX graph before calling graph.add_edges_from().
+ # Eventually make this work for RX too...
+ nx_graph.add_edges_from(diagonal_edges)
for edge in diagonal_edges:
- graph.edges[edge]["shared_perim"] = 0
+ # frm: TODO: When/if grid.py is converted to operate on GerryChain Graph
+ # objects instead of NX.Graph objects, this use of NX
+ # EdgeView to get/set edge data will need to change to use
+ # gerrychain_graph.edge_data()
+ #
+ # We will also need to think about edge vs edge_id. In this
+ # case we want an edge_id, so that means we need to look at
+ # how diagonal_edges are created - but that is for the future...
+ nx_graph.edges[edge]["shared_perim"] = 0
- networkx.set_node_attributes(graph, 1, "population")
- networkx.set_node_attributes(graph, 1, "area")
+ # frm: These just set all nodes in the graph to have the given attributes with a value
+ # of 1
+ # frm: TODO: These won't work for the new graph, and they won't work for RX
+ networkx.set_node_attributes(nx_graph, 1, "population")
+ networkx.set_node_attributes(nx_graph, 1, "area")
- tag_boundary_nodes(graph, dimensions)
+ _tag_boundary_nodes(nx_graph, dimensions)
- return graph
+ return nx_graph
+# frm ???: Why is this here instead of in graph.py? Who is it intended for?
+# Internal vs. External?
def give_constant_attribute(graph: Graph, attribute: Any, value: Any) -> None:
"""
Sets the specified attribute to the specified value for all nodes in the graph.
@@ -194,11 +249,11 @@ def give_constant_attribute(graph: Graph, attribute: Any, value: Any) -> None:
:returns: None
"""
- for node in graph.nodes:
- graph.nodes[node][attribute] = value
+ for node_id in graph.node_indices:
+ graph.node_data(node_id)[attribute] = value
-def tag_boundary_nodes(graph: Graph, dimensions: Tuple[int, int]) -> None:
+def _tag_boundary_nodes(nx_graph: networkx.Graph, dimensions: Tuple[int, int]) -> None:
"""
Adds the boolean attribute ``boundary_node`` to each node in the graph.
If the node is on the boundary of the grid, that node also gets the attribute
@@ -211,13 +266,30 @@ def tag_boundary_nodes(graph: Graph, dimensions: Tuple[int, int]) -> None:
:returns: None
"""
+ #
+ # frm: Another case of code that is not clear (at least to me). It took me
+ # a while to figure out that the name/label for a node in a grid graph
+ # is a tuple and not just a number or string. The tuple indicates its
+ # position in the grid (x,y) cartesian coordinates, so node[0] below
+ # means its x-position and node[1] means its y-position. So the if-stmt
+ # below tests whether a node is all the way on the left or the right or
+ # all the way on the top or the bottom. If so, it is tagged as a
+ # boundary node and it gets its boundary_perim value set - still not
+ # sure what that does/means...
+ #
+ # Peter's comment from PR:
+ #
+ # I think that being able to identify a boundary edge was needed in some early
+ # experiments, so it was important to tag them, but I haven't really seen
+ # anything that cares about this in a while
+
m, n = dimensions
- for node in graph.nodes:
+ for node in nx_graph.nodes:
if node[0] in [0, m - 1] or node[1] in [0, n - 1]:
- graph.nodes[node]["boundary_node"] = True
- graph.nodes[node]["boundary_perim"] = get_boundary_perim(node, dimensions)
+ nx_graph.nodes[node]["boundary_node"] = True
+ nx_graph.nodes[node]["boundary_perim"] = get_boundary_perim(node, dimensions)
else:
- graph.nodes[node]["boundary_node"] = False
+ nx_graph.nodes[node]["boundary_node"] = False
def get_boundary_perim(node: Tuple[int, int], dimensions: Tuple[int, int]) -> int:
diff --git a/gerrychain/meta/diversity.py b/gerrychain/meta/diversity.py
index e8961ec4..b3580400 100644
--- a/gerrychain/meta/diversity.py
+++ b/gerrychain/meta/diversity.py
@@ -4,6 +4,7 @@
from dataclasses import dataclass
from typing import Iterable, Tuple
+
from gerrychain.partition import Partition
@@ -65,7 +66,7 @@ def collect_diversity_stats(
for partition in chain:
steps_taken += 1
- for district, nodes in partition.assignment.parts.items():
+ for _, nodes in partition.assignment.parts.items():
hashable_nodes = tuple(sorted(list(nodes)))
if hashable_nodes not in seen_districts:
unique_districts += 1
diff --git a/gerrychain/metagraph.py b/gerrychain/metagraph.py
index 438af206..c79dc623 100644
--- a/gerrychain/metagraph.py
+++ b/gerrychain/metagraph.py
@@ -12,21 +12,31 @@
"""
from itertools import product
-from .constraints import Validator
-from typing import Callable, Dict, Iterator, Iterable, Union
+from typing import Callable, Dict, Iterable, Iterator, Union
+
from gerrychain.partition import Partition
+from .constraints import Validator
+
def all_cut_edge_flips(partition: Partition) -> Iterator[Dict]:
"""
Generate all possible flips of cut edges in a partition
without any constraints.
+ This routine finds all edges on the boundary of
+ districts - those that are "cut edges" where one node
+ is in one district and the other node is in another
+ district. These are all of the places where you
+ could move the boundary between districts by moving a single
+ node.
+
:param partition: The partition object.
:type partition: Partition
:returns: An iterator that yields dictionaries representing the flipped edges.
:rtype: Iterator[Dict]
"""
+
for edge, index in product(partition.cut_edges, (0, 1)):
yield {edge[index]: partition.assignment.mapping[edge[1 - index]]}
@@ -80,9 +90,7 @@ def all_valid_flips(
yield state.flips
-def metagraph_degree(
- partition: Partition, constraints: Union[Iterable[Callable], Callable]
-) -> int:
+def metagraph_degree(partition: Partition, constraints: Union[Iterable[Callable], Callable]) -> int:
"""
Calculate the degree of the node in the metagraph of the given partition.
That is to say, compute how many possible valid states are reachable from
diff --git a/gerrychain/metrics/__init__.py b/gerrychain/metrics/__init__.py
index e334b983..d2a7734a 100644
--- a/gerrychain/metrics/__init__.py
+++ b/gerrychain/metrics/__init__.py
@@ -1,9 +1,9 @@
from .compactness import polsby_popper
from .partisan import (
+ efficiency_gap,
mean_median,
partisan_bias,
partisan_gini,
- efficiency_gap,
wasted_votes,
)
diff --git a/gerrychain/metrics/compactness.py b/gerrychain/metrics/compactness.py
index 39c288f8..efa10334 100644
--- a/gerrychain/metrics/compactness.py
+++ b/gerrychain/metrics/compactness.py
@@ -33,8 +33,6 @@ def polsby_popper(partition) -> Dict[int, float]:
:rtype: Dict[int, float]
"""
return {
- part: compute_polsby_popper(
- partition["area"][part], partition["perimeter"][part]
- )
+ part: compute_polsby_popper(partition["area"][part], partition["perimeter"][part])
for part in partition.parts
}
diff --git a/gerrychain/metrics/partisan.py b/gerrychain/metrics/partisan.py
index ef8be5cc..6090498f 100644
--- a/gerrychain/metrics/partisan.py
+++ b/gerrychain/metrics/partisan.py
@@ -6,15 +6,18 @@
with circular imports.
"""
-import numpy
from typing import Tuple
+import numpy
+
+# frm: TODO: Refactoring: Why are these not just included in the file that defines ElectionResults?
+
def mean_median(election_results) -> float:
"""
Computes the Mean-Median score for the given ElectionResults.
A positive value indicates an advantage for the first party listed
- in the Election's parties_to_columns dictionary.
+ in the Election's party_names_to_node_attribute_names dictionary.
:param election_results: An ElectionResults object
:type election_results: ElectionResults
@@ -32,7 +35,7 @@ def mean_thirdian(election_results) -> float:
"""
Computes the Mean-Median score for the given ElectionResults.
A positive value indicates an advantage for the first party listed
- in the Election's parties_to_columns dictionary.
+ in the Election's party_names_to_node_attribute_names dictionary.
The motivation for this score is that the minority party in many
states struggles to win even a third of the seats.
@@ -56,7 +59,7 @@ def efficiency_gap(election_results) -> float:
"""
Computes the efficiency gap for the given ElectionResults.
A positive value indicates an advantage for the first party listed
- in the Election's parties_to_columns dictionary.
+ in the Election's party_names_to_node_attribute_names dictionary.
:param election_results: An ElectionResults object
:type election_results: ElectionResults
@@ -64,9 +67,7 @@ def efficiency_gap(election_results) -> float:
:returns: The efficiency gap for the given ElectionResults
:rtype: float
"""
- party1, party2 = [
- election_results.counts(party) for party in election_results.election.parties
- ]
+ party1, party2 = [election_results.counts(party) for party in election_results.election.parties]
wasted_votes_by_part = map(wasted_votes, party1, party2)
total_votes = election_results.total_votes()
numerator = sum(waste2 - waste1 for waste1, waste2 in wasted_votes_by_part)
diff --git a/gerrychain/optimization/__init__.py b/gerrychain/optimization/__init__.py
index 4192e674..2272d546 100644
--- a/gerrychain/optimization/__init__.py
+++ b/gerrychain/optimization/__init__.py
@@ -1,4 +1,4 @@
-from .optimization import SingleMetricOptimizer
from .gingleator import Gingleator
+from .optimization import SingleMetricOptimizer
__all__ = ["SingleMetricOptimizer", "Gingleator"]
diff --git a/gerrychain/optimization/gingleator.py b/gerrychain/optimization/gingleator.py
index 77399b3c..11fe5635 100755
--- a/gerrychain/optimization/gingleator.py
+++ b/gerrychain/optimization/gingleator.py
@@ -1,11 +1,13 @@
-from .optimization import SingleMetricOptimizer
-
-from functools import partial
-import numpy as np
import warnings
+from functools import partial
from typing import Callable, Iterable, Optional, Union
+
+import numpy as np
+
+from gerrychain.constraints import Bounds, Validator
from gerrychain.partition import Partition
-from gerrychain.constraints import Validator, Bounds
+
+from .optimization import SingleMetricOptimizer
class Gingleator(SingleMetricOptimizer):
@@ -13,9 +15,9 @@ class Gingleator(SingleMetricOptimizer):
`Gingleator` is a child class of `SingleMetricOptimizer` which can be used to search for plans
with increased numbers of Gingles' districts.
- A gingles district (named for the Supreme Court case Thornburg v. Gingles) is a district that is
- majority-minority. aka 50% + 1 of some population subgroup. Demonstrating additional Gingles
- districts is one of the litmus tests used in bringing forth a VRA case.
+ A gingles district (named for the Supreme Court case Thornburg v. Gingles) is a district
+ that is majority-minority. aka 50% + 1 of some population subgroup. Demonstrating additional
+ Gingles districts is one of the litmus tests used in bringing forth a VRA case.
"""
def __init__(
@@ -73,23 +75,18 @@ def __init__(
"`minority_perc_col` and `minority_pop_col` are both specified. By \
default `minority_perc_col` will be used."
)
- score_function = (
- self.num_opportunity_dists if score_function is None else score_function
- )
+ score_function = self.num_opportunity_dists if score_function is None else score_function
if minority_perc_col is None:
perc_up = {
min_perc_column_name: lambda part: {
- k: part[minority_pop_col][k] / part[total_pop_col][k]
- for k in part.parts.keys()
+ k: part[minority_pop_col][k] / part[total_pop_col][k] for k in part.parts.keys()
}
}
initial_state.updaters.update(perc_up)
minority_perc_col = min_perc_column_name
- score = partial(
- score_function, minority_perc_col=minority_perc_col, threshold=threshold
- )
+ score = partial(score_function, minority_perc_col=minority_perc_col, threshold=threshold)
super().__init__(proposal, constraints, initial_state, score, maximize=True)
@@ -145,9 +142,7 @@ def reward_partial_dist(
return num_opport_dists + next_dist
@classmethod
- def reward_next_highest_close(
- cls, part: Partition, minority_perc_col: str, threshold: float
- ):
+ def reward_next_highest_close(cls, part: Partition, minority_perc_col: str, threshold: float):
"""
Given a partition, returns the number of opportunity districts, if no additional district
is within 10% of reaching the threshold. If one is, the distance that district is from the
@@ -176,9 +171,7 @@ def reward_next_highest_close(
return num_opport_dists + (next_dist - threshold + 0.1) * 10
@classmethod
- def penalize_maximum_over(
- cls, part: Partition, minority_perc_col: str, threshold: float
- ):
+ def penalize_maximum_over(cls, part: Partition, minority_perc_col: str, threshold: float):
"""
Given a partition, returns the number of opportunity districts + (1 - the maximum excess)
scaled to between 0 and 1.
@@ -204,9 +197,7 @@ def penalize_maximum_over(
return num_opportunity_dists + (1 - max_dist) / (1 - threshold)
@classmethod
- def penalize_avg_over(
- cls, part: Partition, minority_perc_col: str, threshold: float
- ):
+ def penalize_avg_over(cls, part: Partition, minority_perc_col: str, threshold: float):
"""
Given a partition, returns the number of opportunity districts + (1 - the average excess)
scaled to between 0 and 1.
diff --git a/gerrychain/optimization/optimization.py b/gerrychain/optimization/optimization.py
index 5b45e3c6..4d8fc19e 100644
--- a/gerrychain/optimization/optimization.py
+++ b/gerrychain/optimization/optimization.py
@@ -1,10 +1,12 @@
-from ..chain import MarkovChain
-from ..partition import Partition
-from ..accept import always_accept
+import math
import random
-from typing import Union, Callable, List, Any
+from typing import Any, Callable, List, Union
+
from tqdm import tqdm
-import math
+
+from ..accept import always_accept
+from ..chain import MarkovChain
+from ..partition import Partition
class SingleMetricOptimizer:
@@ -31,26 +33,23 @@ class SingleMetricOptimizer:
def __init__(
self,
proposal: Callable[[Partition], Partition],
- constraints: Union[
- Callable[[Partition], bool], List[Callable[[Partition], bool]]
- ],
+ constraints: Union[Callable[[Partition], bool], List[Callable[[Partition], bool]]],
initial_state: Partition,
optimization_metric: Callable[[Partition], Any],
maximize: bool = True,
step_indexer: str = "step",
):
"""
-
:param proposal: Function proposing the next state from the current state.
:type proposal: Callable
- :param constraints: A function, or lists of functions, determining whether the proposed next
- state is valid (passes all binary constraints). Usually this is a
+ :param constraints: A function, or lists of functions, determining whether the proposed
+ next state is valid (passes all binary constraints). Usually this is a
:class:`~gerrychain.constraints.Validator` class instance.
:type constraints: Union[Callable[[Partition], bool], List[Callable[[Partition], bool]]]
:param initial_state: Initial state of the optimizer.
:type initial_state: Partition
- :param optimization_metric: The score function with which to optimize over. This should have
- the signature: ``Partition -> 'a`` where 'a is comparable.
+ :param optimization_metric: The score function with which to optimize over. This should
+ have the signature: ``Partition -> 'a`` where 'a is comparable.
:type optimization_metric: Callable[[Partition], Any]
:param maximize: Boolean indicating whether to maximize or minimize the function.
Defaults to True for maximize.
@@ -74,9 +73,10 @@ def __init__(
self._step_indexer = step_indexer
if self._step_indexer not in self._initial_part.updaters:
- step_updater = lambda p: (
- 0 if p.parent is None else p.parent[self._step_indexer] + 1
- )
+
+ def step_updater(p: Partition) -> int:
+ return 0 if p.parent is None else p.parent[self._step_indexer] + 1
+
self._initial_part.updaters[self._step_indexer] = step_updater
@property
@@ -189,8 +189,8 @@ def jumpcycle_beta_function(
) -> Callable[[int], float]:
"""
Class method that binds and return simple hot-cold cycle beta temperature function, where
- the chain runs hot for some given duration and then cold for some duration, and repeats that
- cycle.
+ the chain runs hot for some given duration and then cold for some duration, and repeats
+ that cycle.
:param duration_hot: Number of steps to run chain hot.
:type duration_hot: int
@@ -239,11 +239,7 @@ def beta_function(step: int):
elif time_in_cycle < cycle_length - duration_cooldown:
return 1
else:
- return (
- 1
- - (time_in_cycle - cycle_length + duration_cooldown)
- / duration_cooldown
- )
+ return 1 - (time_in_cycle - cycle_length + duration_cooldown) / duration_cooldown
return beta_function
@@ -303,7 +299,8 @@ def logitcycle_beta_function(
cycle_length = duration_hot + 2 * duration_cooldown + duration_cold
# this will scale from 0 to 1 approximately
- logit = lambda x: (math.log(x / (1 - x)) + 5) / 10
+ def logit(x):
+ return (math.log(x / (1 - x)) + 5) / 10
def beta_function(step: int):
time_in_cycle = step % cycle_length
@@ -320,8 +317,7 @@ def beta_function(step: int):
return 1
else:
value = 1 - logit(
- (time_in_cycle - cycle_length + duration_cooldown)
- / duration_cooldown
+ (time_in_cycle - cycle_length + duration_cooldown) / duration_cooldown
)
if value < 0:
return 0
@@ -355,7 +351,8 @@ def logit_jumpcycle_beta_function(
cycle_length = duration_hot + duration_cooldown + duration_cold
# this will scale from 0 to 1 approximately
- logit = lambda x: (math.log(x / (1 - x)) + 5) / 10
+ def logit(x):
+ return (math.log(x / (1 - x)) + 5) / 10
def beta_function(step: int):
time_in_cycle = step % cycle_length
@@ -400,9 +397,7 @@ def short_bursts(
"""
if with_progress_bar:
for part in tqdm(
- self.short_bursts(
- burst_length, num_bursts, accept, with_progress_bar=False
- ),
+ self.short_bursts(burst_length, num_bursts, accept, with_progress_bar=False),
total=burst_length * num_bursts,
):
yield part
@@ -453,9 +448,7 @@ def simulated_annealing(
chain = MarkovChain(
self._proposal,
self._constraints,
- self._simulated_annealing_acceptance_function(
- beta_function, beta_magnitude
- ),
+ self._simulated_annealing_acceptance_function(beta_function, beta_magnitude),
self._initial_part,
num_steps,
)
@@ -505,7 +498,7 @@ def tilted_short_bursts(
with_progress_bar=with_progress_bar,
)
- # TODO: Maybe add a max_time variable so we don't run forever.
+ # TODO: Refactoring: Maybe add a max_time variable so we don't run forever.
def variable_length_short_bursts(
self,
num_steps: int,
diff --git a/gerrychain/partition/__init__.py b/gerrychain/partition/__init__.py
index 7d1d2600..759aea89 100644
--- a/gerrychain/partition/__init__.py
+++ b/gerrychain/partition/__init__.py
@@ -1,4 +1,4 @@
-from .partition import Partition
from .geographic import GeographicPartition
+from .partition import Partition
__all__ = ["Partition", "GeographicPartition"]
diff --git a/gerrychain/partition/assignment.py b/gerrychain/partition/assignment.py
index b38ca49d..e58987f1 100644
--- a/gerrychain/partition/assignment.py
+++ b/gerrychain/partition/assignment.py
@@ -1,10 +1,11 @@
from collections import defaultdict
from collections.abc import Mapping
-from typing import Dict, Union, Optional, DefaultDict, Set, Type
-from ..graph import Graph
+from typing import DefaultDict, Dict, Optional, Set, Type, Union
import pandas
+from ..graph import Graph
+
class Assignment(Mapping):
"""
@@ -20,9 +21,7 @@ class Assignment(Mapping):
__slots__ = ["parts", "mapping"]
- def __init__(
- self, parts: Dict, mapping: Optional[Dict] = None, validate: bool = True
- ) -> None:
+ def __init__(self, parts: Dict, mapping: Optional[Dict] = None, validate: bool = True) -> None:
"""
:param parts: Dictionary mapping partition assignments frozensets of nodes.
:type parts: Dict
@@ -37,6 +36,7 @@ def __init__(
:raises ValueError: if the keys of ``parts`` are not unique
:raises TypeError: if the values of ``parts`` are not frozensets
"""
+
if validate:
number_of_keys = sum(len(keys) for keys in parts.values())
number_of_unique_keys = len(set().union(*parts.values()))
@@ -77,6 +77,14 @@ def update_flows(self, flows):
"""
Update the assignment for some nodes using the given flows.
"""
+ # frm: Update the assignment of nodes to partitions by adding
+ # all of the new nodes and removing all of the old nodes
+ # as represented in the flows (dict keyed by district (part)
+ # of nodes flowing "in" and "out" for that district).
+ #
+ # Also, reset the mapping of node to partition (self.mapping)
+ # to reassign each node to its new partition.
+ #
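+ # frm: Sketch of a flows dict (node and part ids are hypothetical):
+ #
+ #     flows = {
+ #         1: {"in": {7}, "out": set()},   # node 7 moves into part 1...
+ #         2: {"in": set(), "out": {7}},   # ...and out of part 2
+ #     }
+ #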
for part, flow in flows.items():
# Union between frozenset and set returns an object whose type
# matches the object on the left, which here is a frozenset
@@ -119,9 +127,7 @@ def to_series(self) -> pandas.Series:
:returns: The assignment as a :class:`pandas.Series`.
:rtype: pandas.Series
"""
- groups = [
- pandas.Series(data=part, index=nodes) for part, nodes in self.parts.items()
- ]
+ groups = [pandas.Series(data=part, index=nodes) for part, nodes in self.parts.items()]
return pandas.concat(groups)
def to_dict(self) -> Dict:
@@ -146,10 +152,63 @@ def from_dict(cls, assignment: Dict) -> "Assignment":
passed-in dictionary.
:rtype: Assignment
"""
+
+ # frm: TODO: Refactoring: Clean up from_dict().
+ #
+ # A couple of things:
+ # * It uses a routine, level_sets(), which is only ever used here, so
+ # why bother having a separate routine. All it does is convert a dict
+ # mapping node_ids to parts into a dict mapping parts into sets of
+ # node_ids. Why not just have that code here inline?
+ #
+ # * Also, the constructor for Assignment explicitly allows for the caller
+ # to pass in a "mapping" of node_id to part, which we have right here.
+ # Why don't we pass it in and save having to recompute it?
+ #
+
parts = {part: frozenset(keys) for part, keys in level_sets(assignment).items()}
return cls(parts)
+ def new_assignment_convert_old_node_ids_to_new_node_ids(
+ self, node_id_mapping: Dict
+ ) -> "Assignment":
+ """
+ Create a new Assignment object from the one passed in, where the node_ids are changed
+ according to the node_id_mapping from old node_ids to new node_ids.
+
+ This routine was motivated by the fact that node_ids change when converting from a
+ NetworkX based graph to a RustworkX based graph. An Assignment based on the node_ids in
+ the NetworkX based graph needs to be changed to use the new node_ids - the new
+ Assignment is semantically equivalent - just converted to use the new node_ids in
+ the RX based graph.
+
+ The node_id_mapping is of the form {old_node_id: new_node_id}
+
+ :param node_id_mapping: A dict mapping old node_ids to new node_ids
+ :type node_id_mapping: Dict
+
+ :returns: A new, semantically equivalent Assignment using the new node_ids
+ :rtype: Assignment
+ """
+
+ # Dict of the form: {node_id: part_id}
+ old_assignment_mapping = self.mapping
+
+ # convert old_node_ids to new_node_ids, keeping part IDs the same
+ new_assignment_mapping = {
+ node_id_mapping[old_node_id]: part
+ for old_node_id, part in old_assignment_mapping.items()
+ }
+ # Now update the parts dict that has a frozenset of all the nodes in each part (district)
+ new_parts = {}
+ for cur_node_id, cur_part in new_assignment_mapping.items():
+ if cur_part not in new_parts:
+ new_parts[cur_part] = set()
+ new_parts[cur_part].add(cur_node_id)
+ for cur_part, set_of_nodes in new_parts.items():
+ new_parts[cur_part] = frozenset(set_of_nodes)
+
+ new_assignment = Assignment(new_parts, new_assignment_mapping)
+
+ return new_assignment
+
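+ # frm: Sketch with hypothetical ids: NX node "a" became RX node 0, etc.
+ #
+ #     old = Assignment.from_dict({"a": 1, "b": 1, "c": 2})
+ #     new = old.new_assignment_convert_old_node_ids_to_new_node_ids(
+ #         {"a": 0, "b": 1, "c": 2}
+ #     )
+ #     # new.mapping == {0: 1, 1: 1, 2: 2}
+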
def get_assignment(
part_assignment: Union[str, Dict, Assignment], graph: Optional[Graph] = None
@@ -174,13 +233,22 @@ def get_assignment(
is not provided.
:raises TypeError: If the part_assignment is not a string or dictionary.
"""
+
+ # frm: TODO: Refactoring: Think about whether to split this into two functions. At
+ # present, it does different things based on whether
+ # the "part_assignment" parameter is a string, a dict,
+ # or an assignment. Probably not worth the trouble (possible
+ # legacy issues), but I just can't get used to the Python habit
+ # of weak typing...
+
if isinstance(part_assignment, str):
+ # Extract an assignment using the named node attribute
if graph is None:
raise TypeError(
"You must provide a graph when using a node attribute for the part_assignment"
)
return Assignment.from_dict(
- {node: graph.nodes[node][part_assignment] for node in graph}
+ {node: graph.node_data(node)[part_assignment] for node in graph}
)
# Check if assignment is a dict or a mapping type
elif callable(getattr(part_assignment, "items", None)):
diff --git a/gerrychain/partition/geographic.py b/gerrychain/partition/geographic.py
index 7dc03386..8ffaeb03 100644
--- a/gerrychain/partition/geographic.py
+++ b/gerrychain/partition/geographic.py
@@ -1,4 +1,3 @@
-from gerrychain.partition import Partition
from gerrychain.updaters import (
Tally,
boundary_nodes,
@@ -9,6 +8,8 @@
perimeter,
)
+from .partition import Partition
+
class GeographicPartition(Partition):
"""
diff --git a/gerrychain/partition/partition.py b/gerrychain/partition/partition.py
index 9f484f61..29da4c2b 100644
--- a/gerrychain/partition/partition.py
+++ b/gerrychain/partition/partition.py
@@ -1,12 +1,33 @@
import json
+from typing import Any, Callable, Dict, Optional, Tuple
+
+# frm: Only used in _first_time() inside __init__() to allow for creating
+# a Partition from a NetworkX Graph object:
+#
+# elif isinstance(graph, networkx.Graph):
+# graph = Graph.from_networkx(graph)
+# self.graph = FrozenGraph(graph)
import networkx
from gerrychain.graph.graph import FrozenGraph, Graph
-from ..updaters import compute_edge_flows, flows_from_changes, cut_edges
+
+from ..tree import recursive_tree_part
+from ..updaters import compute_edge_flows, cut_edges, flows_from_changes
from .assignment import get_assignment
from .subgraphs import SubgraphView
-from ..tree import recursive_tree_part
-from typing import Any, Callable, Dict, Optional, Tuple
+
+# frm TODO: Documentation: Add documentation about how this all works. For instance,
+# what is computationally expensive and how does a FrozenGraph
+# help? Why do we need both assignments and parts?
+#
+# Since a Partition is intimately tied up with how the Markov Chain
+# does its magic, it would make sense to talk about that a bit...
+#
+# For instance, is there any reason to use a Partition object
+# except in a Markov Chain? I suppose they are useful for post
+# Markov Chain analysis - but if so, then it would be nice to
+# know what functionality is tuned for the Markov Chain and what
+# functionality / data is tuned for post Markov Chain analysis.
class Partition:
@@ -56,12 +77,24 @@ def __init__(
which the functions compute.
:param use_default_updaters: If `False`, do not include default updaters.
"""
+
if parent is None:
+ if graph is None:
+                raise Exception("Partition.__init__(): graph object is None")
+
self._first_time(graph, assignment, updaters, use_default_updaters)
else:
self._from_parent(parent, flips)
self._cache = dict()
+
+ # frm: SubgraphView provides cached access to subgraphs for each of the
+        #      partition's districts.  It is important that we assign subgraphs AFTER
+ # we have established what nodes belong to which parts (districts). In
+ # the case when the parent is None, the assignments are explicitly provided,
+ # and in the case when there is a parent, the _from_parent() logic processes
+ # the flips to update the assignments.
+
self.subgraphs = SubgraphView(self.graph, self.parts)
@classmethod
@@ -73,7 +106,6 @@ def from_random_assignment(
pop_col: str,
updaters: Optional[Dict[str, Callable]] = None,
use_default_updaters: bool = True,
- flips: Optional[Dict] = None,
method: Callable = recursive_tree_part,
) -> "Partition":
"""
@@ -92,8 +124,6 @@ def from_random_assignment(
:type updaters: Optional[Dict[str, Callable]], optional
:param use_default_updaters: If `False`, do not include default updaters.
:type use_default_updaters: bool, optional
- :param flips: Dictionary assigning nodes of the graph to their new districts.
- :type flips: Optional[Dict], optional
:param method: The function to use to partition the graph into ``n_parts``. Defaults to
:func:`~gerrychain.tree.recursive_tree_part`.
:type method: Callable, optional
@@ -101,7 +131,9 @@ def from_random_assignment(
:returns: The partition created with a random assignment
:rtype: Partition
"""
- total_pop = sum(graph.nodes[n][pop_col] for n in graph)
+    # frm: NOTE: The param, flips, was never used in this routine, so it has been removed.
+
+ total_pop = sum(graph.node_data(n)[pop_col] for n in graph)
ideal_pop = total_pop / n_parts
assignment = method(
@@ -120,18 +152,71 @@ def from_random_assignment(
)
def _first_time(self, graph, assignment, updaters, use_default_updaters):
- if isinstance(graph, Graph):
- self.graph = FrozenGraph(graph)
- elif isinstance(graph, networkx.Graph):
+ # Make sure that the embedded graph for the Partition is based on
+ # a RustworkX graph, and make sure it is also a FrozenGraph. Both
+ # of these are important for performance.
+
+ # Note that we automatically convert NetworkX based graphs to use RustworkX
+ # when we create a Partition object.
+ #
+ # Creating and manipulating NX Graphs is easy and users
+ # are familiar with doing so. It makes sense to preserve the use case of
+ # creating an NX-Graph and then allowing the code to under-the-covers
+ # convert to RX - both for legacy compatibility, but also because NX provides
+ # a really nice and easy way to create graphs.
+ #
+ # TODO: Documentation: update the documentation
+ # to describe the use case of creating a graph using NX. That documentation
+ # should also describe how to post-process results of a MarkovChain run
+ # but I haven't figured that out yet...
+
+ # If a NX.Graph, create a Graph object based on NX
+ if isinstance(graph, networkx.Graph):
graph = Graph.from_networkx(graph)
+
+ # if a Graph object, make sure it is based on an embedded RustworkX.PyGraph
+ if isinstance(graph, Graph):
+ # frm: TODO: Performance: Remove this short-term hack to do performance testing
+ #
+ # This "test_performance_using_NX_graph" hack just forces the partition
+ # to NOT convert the NX graph to be RX based. This allows me to
+ # compare RX performance to NX performance with the same code - so that
+ # whatever is different is crystal clear.
+ test_performance_using_NX_graph = False
+ if (graph.is_nx_graph()) and test_performance_using_NX_graph:
+ self.assignment = get_assignment(assignment, graph)
+ print("=====================================================")
+ print("Performance-Test: using NetworkX for Partition object")
+ print("=====================================================")
+
+ elif graph.is_nx_graph():
+
+ # Get the assignment that would be appropriate for the NX-based graph
+ old_nx_assignment = get_assignment(assignment, graph)
+
+ # Convert the NX graph to be an RX graph
+ graph = graph.convert_from_nx_to_rx()
+
+ # After converting from NX to RX, we need to update the Partition's assignment
+ # because it used the old NX node_ids (converting to RX changes node_ids)
+ nx_to_rx_node_id_map = graph.get_nx_to_rx_node_id_map()
+ rx_assign = old_nx_assignment.new_assignment_convert_old_node_ids_to_new_node_ids(
+ nx_to_rx_node_id_map
+ )
+ self.assignment = rx_assign
+
+ else:
+ self.assignment = get_assignment(assignment, graph)
+
self.graph = FrozenGraph(graph)
+
elif isinstance(graph, FrozenGraph):
self.graph = graph
+ self.assignment = get_assignment(assignment, graph)
+
else:
raise TypeError(f"Unsupported Graph object with type {type(graph)}")
- self.assignment = get_assignment(assignment, graph)
-
if set(self.assignment) != set(graph):
raise KeyError("The graph's node labels do not match the Assignment's keys")
@@ -145,11 +230,25 @@ def _first_time(self, graph, assignment, updaters, use_default_updaters):
self.updaters.update(updaters)
+ # Note that the updater functions are executed lazily - that is, only when
+ # a caller asks for the results, such as partition["perimeter"]. See the code
+ # for __getitem__().
+ #
+ # So no need to execute the updater functions now...
+
self.parent = None
self.flips = None
self.flows = None
self.edge_flows = None
+ # frm ???: This is only called once and it is tagged as an internal
+ # function (leading underscore). Is there a good reason
+ # why this is not internal to the __init__() routine
+ # where it is used?
+ #
+ # That is, is there any reason why anyone might ever
+ # call this except __init__()?
+
def _from_parent(self, parent: "Partition", flips: Dict) -> None:
self.parent = parent
self.flips = flips
@@ -173,7 +272,7 @@ def __repr__(self):
def __len__(self):
return len(self.parts)
- def flip(self, flips: Dict) -> "Partition":
+ def flip(self, flips: Dict, use_original_nx_node_ids=False) -> "Partition":
"""
Returns the new partition obtained by performing the given `flips`
on this partition.
@@ -182,6 +281,32 @@ def flip(self, flips: Dict) -> "Partition":
:returns: the new :class:`Partition`
:rtype: Partition
"""
+
+ # frm: TODO: Documentation: Change comments above to document new optional parameter,
+ # use_original_nx_node_ids.
+ #
+ # This is a new issue that arises from the fact that node_ids in RX are different from
+ # those in the original NX graph. In the pre-RX code, we did not need to distinguish
+ # between calls to flip() that were internal code used when doing a MarkovChain versus
+ # user code for instance in tests. However, in the new RX world, the internal code uses
+ # RX node_ids and the tests want to use "original" NX node_ids. Hence the new parameter.
+
+ # If the caller identified flips in terms of "original" node_ids (typically node_ids
+ # associated with an NX-based graph before creating a Partition object), then translate
+ # those original node_ids into the appropriate internal RX-based node_ids.
+ #
+ # Note that original node_ids in flips are typically used in tests
+ #
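+        # Illustrative usage (hypothetical node id "A" from the original NX graph):
+        #
+        #     new_partition = partition.flip({"A": 2}, use_original_nx_node_ids=True)
+        #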
+
+ if use_original_nx_node_ids:
+ new_flips = {}
+ for original_nx_node_id, part in flips.items():
+ internal_node_id = self.graph.internal_node_id_for_original_nx_node_id(
+ original_nx_node_id
+ )
+ new_flips[internal_node_id] = part
+ flips = new_flips
+
return self.__class__(parent=self, flips=flips)
def crosses_parts(self, edge: Tuple) -> bool:
@@ -205,11 +330,52 @@ def __getitem__(self, key: str) -> Any:
:returns: The value of the updater.
:rtype: Any
"""
+ # frm: Cleverness Alert: Delayed evaluation of updater functions...
+ #
+ # The code immediately below executes the appropriate updater function
+ # if it has not already been executed and then caches the results.
+ # This makes sense - why compute something if nobody ever wants it,
+ # but it took me a while to figure out why the constructor did not
+ # explicitly call the updaters.
+ #
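+        # Illustrative (hypothetical updater name "population"):
+        #
+        #     partition["population"]  # first access: runs the updater, caches the result
+        #     partition["population"]  # later accesses: served straight from self._cache
+        #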
+
if key not in self._cache:
+ # frm: TODO: Testing: Add a test checking what happens if no updater defined
+ #
+ # This code checks that the desired updater actually is
+ # defined in the list of updaters. If not, then this
+ # would produce a perhaps difficult to debug problem...
+ if key not in self.updaters:
+ raise KeyError(
+ f"__getitem__(): updater: {key} not defined in the updaters for the partition"
+ )
+
self._cache[key] = self.updaters[key](self)
return self._cache[key]
def __getattr__(self, key):
+ # frm TODO: Refactor: Not sure it makes sense to allow two ways to accomplish the same
+ # thing...
+ #
+ # The code below allows Partition users to get the results of updaters by just
+    # doing: partition.some_updater which is the same as doing: partition["some_updater"]
+ # It is clever, but perhaps too clever. Why provide two ways to do the same thing?
+ #
+ # It is also odd on a more general level - this approach means that the attributes of a
+ # Partition are the same as the names of the updaters and return the results of running
+ # the updater functions. I guess this makes sense, but there is no documentation (that I
+ # am aware of) that makes this clear.
+ #
+ # Peter's comment in PR:
+ #
+ # This is actually on my list of things that I would prefer removed. When I first
+ # started working with this codebase, I found the fact that you could just do
+ # partition.name_of_my_updater really confusing, and, from a Python perspective,
+ # I think that the more intuitive interface is keyword access like in a dictionary.
+ # I haven't scoured the codebase for instances of ".attr" yet, but this is one of
+ # the things that I am 100% okay with getting rid of. Almost all of the people
+ # that I have seen work with this package use the partition["attr"] paradigm anyway.
+ #
return self[key]
def keys(self):
@@ -220,6 +386,15 @@ def parts(self):
return self.assignment.parts
def plot(self, geometries=None, **kwargs):
+ #
+ # frm ???: I think that this plots districts on a map that is defined
+ # by the geometries parameter (presumably polygons or something similar).
+ # It converts the partition data into data that the plot routine
+ # knows how to deal with, but essentially it just assigns each node
+ # to a district. the **kwargs are then passed to the plotting
+ # engine - presumably to define colors and other graph stuff.
+ #
+
"""
Plot the partition, using the provided geometries.
@@ -236,18 +411,17 @@ def plot(self, geometries=None, **kwargs):
import geopandas
if geometries is None:
- geometries = self.graph.geometry
+ if hasattr(self.graph, "geometry"):
+ geometries = self.graph.geometry
+ else:
+ raise Exception("Partition.plot: graph has no geometry data")
- if set(geometries.index) != set(self.graph.nodes):
- raise TypeError(
- "The provided geometries do not match the nodes of the graph."
- )
+ if set(geometries.index) != self.graph.node_indices:
+ raise TypeError("The provided geometries do not match the nodes of the graph.")
assignment_series = self.assignment.to_series()
if isinstance(geometries, geopandas.GeoDataFrame):
geometries = geometries.geometry
- df = geopandas.GeoDataFrame(
- {"assignment": assignment_series}, geometry=geometries
- )
+ df = geopandas.GeoDataFrame({"assignment": assignment_series}, geometry=geometries)
return df.plot(column="assignment", **kwargs)
@classmethod
@@ -285,13 +459,17 @@ def from_districtr_file(
id_column_key = districtr_plan["idColumn"]["key"]
districtr_assignment = districtr_plan["assignment"]
try:
- node_to_id = {node: str(graph.nodes[node][id_column_key]) for node in graph}
+ node_to_id = {node: str(graph.node_data(node)[id_column_key]) for node in graph}
except KeyError:
raise TypeError(
"The provided graph is missing the {} column, which is "
"needed to match the Districtr assignment to the nodes of the graph."
)
- assignment = {node: districtr_assignment[node_to_id[node]] for node in graph}
+ # frm: TODO: Testing: Verify that there is a test for from_districtr_file()
+
+ assignment = {
+ node_id: districtr_assignment[node_to_id[node_id]] for node_id in graph.node_indices
+ }
return cls(graph, assignment, updaters)
diff --git a/gerrychain/partition/subgraphs.py b/gerrychain/partition/subgraphs.py
index b282a510..8d23d252 100644
--- a/gerrychain/partition/subgraphs.py
+++ b/gerrychain/partition/subgraphs.py
@@ -1,4 +1,5 @@
-from typing import List, Any, Tuple
+from typing import Any, List, Tuple
+
from ..graph import Graph
diff --git a/gerrychain/proposals/__init__.py b/gerrychain/proposals/__init__.py
index 30e0d311..1a9474db 100644
--- a/gerrychain/proposals/__init__.py
+++ b/gerrychain/proposals/__init__.py
@@ -1,6 +1,6 @@
from .proposals import *
-from .tree_proposals import recom, reversible_recom, ReCom
from .spectral_proposals import spectral_recom
+from .tree_proposals import recom, reversible_recom
__all__ = [
"recom",
diff --git a/gerrychain/proposals/proposals.py b/gerrychain/proposals/proposals.py
index 988c7467..d38adfc3 100644
--- a/gerrychain/proposals/proposals.py
+++ b/gerrychain/proposals/proposals.py
@@ -66,8 +66,7 @@ def propose_chunk_flip(partition: Partition) -> Partition:
valid_flips = [
nbr
for nbr in partition.graph.neighbors(flipped_node)
- if partition.assignment.mapping[nbr]
- != partition.assignment.mapping[flipped_node]
+ if partition.assignment.mapping[nbr] != partition.assignment.mapping[flipped_node]
]
for flipped_neighbor in valid_flips:
@@ -111,17 +110,14 @@ def slow_reversible_propose_bi(partition: Partition) -> Partition:
:rtype: Partition
"""
- b_nodes = {x[0] for x in partition["cut_edges"]}.union(
- {x[1] for x in partition["cut_edges"]}
+ b_nodes = {edge[0] for edge in partition["cut_edges"]}.union(
+ {edge[1] for edge in partition["cut_edges"]}
)
flip = random.choice(list(b_nodes))
neighbor_assignments = list(
set(
- [
- partition.assignment.mapping[neighbor]
- for neighbor in partition.graph.neighbors(flip)
- ]
+ [partition.assignment.mapping[neighbor] for neighbor in partition.graph.neighbors(flip)]
)
)
neighbor_assignments.remove(partition.assignment.mapping[flip])
@@ -147,9 +143,7 @@ def slow_reversible_propose(partition: Partition) -> Partition:
:rtype: Partition
"""
- b_nodes = {
- (x[0], partition.assignment.mapping[x[1]]) for x in partition["cut_edges"]
- }.union(
+ b_nodes = {(x[0], partition.assignment.mapping[x[1]]) for x in partition["cut_edges"]}.union(
{(x[1], partition.assignment.mapping[x[0]]) for x in partition["cut_edges"]}
)
diff --git a/gerrychain/proposals/spectral_proposals.py b/gerrychain/proposals/spectral_proposals.py
index 3c213a94..66f121a2 100644
--- a/gerrychain/proposals/spectral_proposals.py
+++ b/gerrychain/proposals/spectral_proposals.py
@@ -1,57 +1,96 @@
-import networkx as nx
-from numpy import linalg as LA
import random
+from typing import Dict, Optional
+
+from numpy import linalg as LA
+
from ..graph import Graph
from ..partition import Partition
-from typing import Dict, Optional
-def spectral_cut(
- graph: Graph, part_labels: Dict, weight_type: str, lap_type: str
-) -> Dict:
+# frm: only ever used in this file - but maybe it is used externally?
+def spectral_cut(subgraph: Graph, part_labels: Dict, weight_type: str, lap_type: str) -> Dict:
"""
Spectral cut function.
- Uses the signs of the elements in the Fiedler vector of a graph to
+ Uses the signs of the elements in the Fiedler vector of a subgraph to
partition into two components.
- :param graph: The graph to be partitioned.
- :type graph: Graph
- :param part_labels: The current partition of the graph.
+ :param subgraph: The subgraph to be partitioned.
+ :type subgraph: Graph
+ :param part_labels: The current partition of the subgraph.
:type part_labels: Dict
:param weight_type: The type of weight to be used in the Laplacian.
:type weight_type: str
:param lap_type: The type of Laplacian to be used.
:type lap_type: str
- :returns: A dictionary assigning nodes of the graph to their new districts.
+ :returns: A dictionary assigning nodes of the subgraph to their new districts.
:rtype: Dict
"""
- nlist = list(graph.nodes())
- n = len(nlist)
+ # This routine operates on subgraphs, which is important because the node_ids
+ # in a subgraph are different from the node_ids of the parent graph, so
+ # the return value's node_ids need to be translated back into the appropriate
+ # parent node_ids.
+
+ node_list = list(subgraph.node_indices)
+ num_nodes = len(node_list)
if weight_type == "random":
- for edge in graph.edge_indices:
- graph.edges[edge]["weight"] = random.random()
+ # assign a random weight to each edge in the subgraph
+ for edge_id in subgraph.edge_indices:
+ subgraph.edge_data(edge_id)["weight"] = random.random()
+ # Compute the desired laplacian matrix (convert from sparse to dense)
if lap_type == "normalized":
- LAP = (nx.normalized_laplacian_matrix(graph)).todense()
-
+ laplacian_matrix = (subgraph.normalized_laplacian_matrix()).todense()
else:
- LAP = (nx.laplacian_matrix(graph)).todense()
-
- NLMva, NLMve = LA.eigh(LAP)
- NFv = NLMve[:, 1]
- xNFv = [NFv.item(x) for x in range(n)]
-
- node_color = [xNFv[x] > 0 for x in range(n)]
-
- clusters = {nlist[x]: part_labels[node_color[x]] for x in range(n)}
-
- return clusters
-
-
+ laplacian_matrix = (subgraph.laplacian_matrix()).todense()
+
+ # frm TODO: Documentation: Add a better explanation for why eigenvectors are useful
+ # for determining flips. Perhaps just a URL to an article
+ # somewhere...
+ #
+ # I have added comments to describe the nuts and bolts of what is happening,
+ # but the overall rationale for this code is missing - and it should be here...
+
+ # LA.eigh(laplacian_matrix) call invokes the eigh() function from
+ # the Numpy LinAlg module which:
+ #
+ # "returns the eigenvalues and eigenvectors of a complex Hermitian
+    #      ... or a real symmetric matrix."
+ #
+ # In our case we have a symmetric matrix, so it returns two
+ # objects - a 1-D numpy array containing the eigenvalues (which we don't
+ # care about) and a 2-D numpy square matrix of the eigenvectors.
+ _, numpy_eigen_vectors = LA.eigh(laplacian_matrix)
+
+    # Extract the eigenvector in column 1 as a numpy array. eigh() returns
+    # eigenvalues (and their eigenvectors) in ascending order, so column 1
+    # corresponds to the second-smallest eigenvalue - this eigenvector is the
+    # Fiedler vector referenced in the docstring above.
+    numpy_eigen_vector = numpy_eigen_vectors[:, 1]
+
+ # Convert to an array of normal Python numbers (not numpy based)
+ eigen_vector_array = [numpy_eigen_vector.item(x) for x in range(num_nodes)]
+
+ # node_color will be True or False depending on whether the value in the
+ # eigen_vector_array is positive or negative. In the code below, this
+ # is equivalent to node_color being 1 or 0 (since Python treats True as 1
+ # and False as 0)
+ node_color = [eigen_vector_array[x] > 0 for x in range(num_nodes)]
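+    #
+    # For example (hypothetical part names): with part_labels == ("d1", "d2"),
+    # a node whose node_color is True is assigned part_labels[True], i.e.
+    # part_labels[1] == "d2"; node_color False selects part_labels[0] == "d1".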
+
+ # Create flips using the node_color to select which part (district) to assign
+ # to the node.
+ flips = {node_list[x]: part_labels[node_color[x]] for x in range(num_nodes)}
+
+ # translate subgraph node_ids in flips to parent_graph node_ids
+ translated_flips = subgraph.translate_subgraph_node_ids_for_flips(flips)
+
+ return translated_flips
+
+
+# frm: only ever used in this file - but maybe it is used externally?
def spectral_recom(
partition: Partition,
weight_type: Optional[str] = None,
@@ -88,16 +127,20 @@ def spectral_recom(
:rtype: Partition
"""
- edge = random.choice(tuple(partition["cut_edges"]))
+ # Select two adjacent parts (districts) at random by first selecting
+ # a cut_edge at random and then figuring out the parts (districts)
+ # associated with the edge.
+ cut_edge = random.choice(tuple(partition["cut_edges"]))
parts_to_merge = (
- partition.assignment.mapping[edge[0]],
- partition.assignment.mapping[edge[1]],
+ partition.assignment.mapping[cut_edge[0]],
+ partition.assignment.mapping[cut_edge[1]],
)
- subgraph = partition.graph.subgraph(
- partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
- )
+ subgraph_nodes = partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
- flips = spectral_cut(subgraph, parts_to_merge, weight_type, lap_type)
+ # Cut the set of all nodes from parts_to_merge into two hopefully new parts (districts)
+ flips = spectral_cut(
+ partition.graph.subgraph(subgraph_nodes), parts_to_merge, weight_type, lap_type
+ )
return partition.flip(flips)
diff --git a/gerrychain/proposals/tree_proposals.py b/gerrychain/proposals/tree_proposals.py
index e66a718b..9c41ab5d 100644
--- a/gerrychain/proposals/tree_proposals.py
+++ b/gerrychain/proposals/tree_proposals.py
@@ -1,20 +1,22 @@
+import random
from functools import partial
from inspect import signature
-import random
+from typing import Callable, Dict, Optional, Union
from gerrychain.partition import Partition
+
from ..tree import (
- epsilon_tree_bipartition,
+ ReselectException,
bipartition_tree,
bipartition_tree_random,
- _bipartition_tree_random_all,
- uniform_spanning_tree,
+ bipartition_tree_random_with_num_cuts,
+ epsilon_tree_bipartition,
find_balanced_edge_cuts_memoization,
- ReselectException,
+ uniform_spanning_tree,
)
-from typing import Callable, Optional, Dict, Union
+# frm: only used in this file
class MetagraphError(Exception):
"""
Raised when the partition we are trying to split is a low degree
@@ -24,6 +26,7 @@ class MetagraphError(Exception):
pass
+# frm: only used in this file
class ValueWarning(UserWarning):
"""
    Raised when a particular value is technically valid, but may
@@ -89,6 +92,7 @@ def recom(
:type method: Callable, optional
:returns: The new partition resulting from the ReCom algorithm.
:rtype: Partition
"""
@@ -101,7 +105,12 @@ def recom(
method = partial(method, region_surcharge=region_surcharge)
while len(bad_district_pairs) < tot_pairs:
+ # frm: In no particular order, try to merge and then split pairs of districts
+ # that have a cut_edge - meaning that they are adjacent, until you either
+ # find one that can be split, or you have tried all possible pairs
+ # of adjacent districts...
try:
+ # frm: TODO: Refactoring: see if there is some way to avoid a while True loop...
while True:
edge = random.choice(tuple(partition["cut_edges"]))
# Need to sort the tuple so that the order is consistent
@@ -115,12 +124,11 @@ def recom(
if tuple(parts_to_merge) not in bad_district_pairs:
break
- subgraph = partition.graph.subgraph(
- partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
- )
+    # frm: Note that the vertical bar operator is set union - it merges the two node sets into one.
+ subgraph_nodes = partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
flips = epsilon_tree_bipartition(
- subgraph.graph,
+ partition.graph.subgraph(subgraph_nodes),
parts_to_merge,
pop_col=pop_col,
pop_target=pop_target,
@@ -132,6 +140,7 @@ def recom(
except Exception as e:
if isinstance(e, ReselectException):
+ # frm: Add this pair to list of pairs that did not work...
bad_district_pairs.add(tuple(parts_to_merge))
continue
else:
@@ -176,6 +185,7 @@ def reversible_recom(
:param balance_edge_fn: The balance edge function. Default is
find_balanced_edge_cuts_memoization.
:type balance_edge_fn: Callable, optional
+ frm: it returns a list of Cuts - a named tuple defined in tree.py
:param M: The maximum number of balance edges. Default is 1.
:type M: int, optional
:param repeat_until_valid: Flag indicating whether to repeat until a valid partition is
@@ -189,18 +199,13 @@ def reversible_recom(
"""
def dist_pair_edges(part, a, b):
+ # frm: Find all edges that cross from district a into district b
return set(
e
for e in part.graph.edges
if (
- (
- part.assignment.mapping[e[0]] == a
- and part.assignment.mapping[e[1]] == b
- )
- or (
- part.assignment.mapping[e[0]] == b
- and part.assignment.mapping[e[1]] == a
- )
+ (part.assignment.mapping[e[0]] == a and part.assignment.mapping[e[1]] == b)
+ or (part.assignment.mapping[e[0]] == b and part.assignment.mapping[e[1]] == a)
)
)
@@ -212,6 +217,9 @@ def bounded_balance_edge_fn(*args, **kwargs):
)
return cuts
+ """
+ frm: Original Code:
+
bipartition_tree_random_reversible = partial(
_bipartition_tree_random_all,
repeat_until_valid=repeat_until_valid,
@@ -219,34 +227,93 @@ def bounded_balance_edge_fn(*args, **kwargs):
balance_edge_fn=bounded_balance_edge_fn,
)
+ I deemed this code to be evil, if only because it used an internal tree.py routine
+ _bipartition_tree_random_all(). This internal routine returns a set of Cut objects
+ which otherwise never appear outside tree.py, so this just adds complexity.
+
+ The only reason the original code used _bipartition_tree_random_all() instead of just
+ using bipartition_tree_random() is that it needs to know how many possible new
+ districts there are. So, I created a new function in tree.py that does EXACTLY
+ what bipartition_tree_random() does but which also returns the number of possible
+ new districts.
+
+ """
+ bipartition_tree_random_reversible = partial(
+ bipartition_tree_random_with_num_cuts,
+ repeat_until_valid=repeat_until_valid,
+ spanning_tree_fn=uniform_spanning_tree,
+ balance_edge_fn=bounded_balance_edge_fn,
+ )
+
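+    # frm: Note: based on the unpacking further below, bipartition_tree_random_with_num_cuts
+    #      is assumed to return either a falsy value (no balanced cut was found) or a
+    #      tuple of (num_possible_districts, nodes).
+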
parts = sorted(list(partition.parts.keys()))
dist_pairs = []
for out_part in parts:
for in_part in parts:
dist_pairs.append((out_part, in_part))
+ # frm: TODO: Code: ???: Grok why this code considers pairs that are the same part...
+ #
+ # For instance, if there are only two parts (districts), then this code will
+ # produce four pairs: (0,0), (0,1), (1,0), (1,1). The code below tests
+ # to see if there is any adjacency, but there will never be adjacency between
+ # the same part (district). Why not just prune out all pairs that have the
+    #      same two values and save an iteration of the entire chain?
+ #
+ # Stated differently, is there any value in doing an entire chain iteration
+ # when we randomly select the same part (district) to merge with itself???
+ #
+ # A similar issue comes up if there are no pair_edges (below). We waste
+ # an entire iteration in that case too - which seems kind of dumb...
+ #
random_pair = random.choice(dist_pairs)
pair_edges = dist_pair_edges(partition, *random_pair)
if random_pair[0] == random_pair[1] or not pair_edges:
return partition # self-loop: no adjacency
+ # frm: TODO: Code: ???: Grok why it is OK to return the partition unchanged as the next step.
+ #
+ # This runs the risk of running an entire chain without ever changing the partition.
+ # I assume that the logic is that there is deliberate randomness introduced each time,
+ # so eventually, if it is possible, the chain will get started, but it seems like there
+ # should be some kind of check to see if it doesn't ever get started, so that the
+ # user can have a clue about what is going on...
+
edge = random.choice(list(pair_edges))
parts_to_merge = (
partition.assignment.mapping[edge[0]],
partition.assignment.mapping[edge[1]],
)
- subgraph = partition.graph.subgraph(
- partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
- )
-
- all_cuts = bipartition_tree_random_reversible(
- subgraph, pop_col=pop_col, pop_target=pop_target, epsilon=epsilon
+ # Remember node_ids from which subgraph was created - we will need them below
+ subgraph_nodes = partition.parts[parts_to_merge[0]] | partition.parts[parts_to_merge[1]]
+
+ # frm: Note: This code has changed to make sure we don't access subgraph node_ids.
+ # The former code saved the subgraph and used its nodes to compute
+ # the remaining_nodes, but this doesn't work with RX, because the
+ # node_ids for the subgraph are different from those in the parent graph.
+ # The solution is to just remember the parent node_ids that were used
+ # to create the subgraph, and to move the subgraph call in as an actual
+ # parameter, so that after the call there is no way to reference it.
+ #
+ # Going forward, this should be a coding style - only invoke Graph.subgraph()
+ # as an actual parameter so that there is no way to inadvertently access
+ # the subgraph's node_ids afterwards.
+ #
+
+ result = bipartition_tree_random_reversible(
+ partition.graph.subgraph(subgraph_nodes),
+ pop_col=pop_col,
+ pop_target=pop_target,
+ epsilon=epsilon,
)
- if not all_cuts:
+ if not result:
return partition # self-loop: no balance edge
- nodes = choice(all_cuts).subset
- remaining_nodes = set(subgraph.nodes()) - set(nodes)
+ num_possible_districts, nodes = result
+
+ remaining_nodes = subgraph_nodes - set(nodes)
+ # Note: Clever way to create a single dictionary from
+ # two dictionaries - the ** operator unpacks each dictionary
+ # and then they get merged into a new dictionary.
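+    #
+    #     e.g. {**{"a": 1}, **{"b": 2}} == {"a": 1, "b": 2}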
flips = {
**{node: parts_to_merge[0] for node in nodes},
**{node: parts_to_merge[1] for node in remaining_nodes},
@@ -255,10 +322,10 @@ def bounded_balance_edge_fn(*args, **kwargs):
new_part = partition.flip(flips)
seam_length = len(dist_pair_edges(new_part, *random_pair))
- prob = len(all_cuts) / (M * seam_length)
+ prob = num_possible_districts / (M * seam_length)
if prob > 1:
raise ReversibilityError(
- f"Found {len(all_cuts)} balance edges, but "
+            f"Found {num_possible_districts} balance edges, but "
f"the upper bound (with seam length 1) is {M}."
)
if random.random() < prob:
@@ -267,6 +334,24 @@ def bounded_balance_edge_fn(*args, **kwargs):
return partition # self-loop
+# frm TODO: Refactoring: I do not think that ReCom() is ever called. Note that it
+# only defines a constructor and a __call__() which would allow
+# you to call the recom() function by creating a ReCom object and then
+# "calling" that object - why not just call the recom function?
+#
+# ...confused...
+#
+# My guess is that someone started writing this code thinking that
+# a class would make sense but then realized that the only use
+# was to call the recom() function but never went back to remove
+# the class. In short, I think that we should probably remove the
+# class and just keep the function...
+#
+# What Peter said in a PR:
+#
+# Another bit of legacy code. I am also not sure why this exists. Seems like
+# there were plans for this and then it got dropped when someone graduated
+#
class ReCom:
"""
ReCom (short for ReCombination) is a class that represents a ReCom proposal
@@ -300,9 +385,7 @@ def __init__(
self.method = method
def __call__(self, partition: Partition):
- return recom(
- partition, self.pop_col, self.ideal_pop, self.epsilon, method=self.method
- )
+ return recom(partition, self.pop_col, self.ideal_pop, self.epsilon, method=self.method)
class ReversibilityError(Exception):
diff --git a/gerrychain/tree.py b/gerrychain/tree.py
index 06ce8433..b6b6a019 100644
--- a/gerrychain/tree.py
+++ b/gerrychain/tree.py
@@ -12,9 +12,9 @@
and methods for assessing and modifying this data.
- Functions for finding balanced edge cuts in a populated graph, either through
contraction or memoization techniques.
-- A suite of functions (`bipartition_tree`, `recursive_tree_part`, `get_seed_chunks`, etc.)
+- A suite of functions (`bipartition_tree`, `recursive_tree_part`, `_get_seed_chunks`, etc.)
for partitioning graphs into balanced subsets based on population targets and tolerances.
-- Utility functions like `get_max_prime_factor_less_than` and `recursive_seed_part_inner`
+- Utility functions like `get_max_prime_factor_less_than` and `_recursive_seed_part_inner`
to assist in complex partitioning tasks.
Dependencies:
@@ -24,120 +24,352 @@
- typing: Used for type hints.
Last Updated: 25 April 2024
-"""
-import networkx as nx
-from networkx.algorithms import tree
+frm: This file, tree.py, needed to be modified to operate on new Graph
+ objects instead of NetworkX Graph objects because the routines are
+        used by the Graph objects inside a Partition, which will soon be based
+ on RustworkX. More specifically, these routines are used by Proposals,
+ and we will soon switch to having the underlying Graph object used
+ in Partitions and Proposals be based on RustworkX.
+
+ It may be the case that they are ONLY ever used by Proposals and
+ hence could just have been rewritten to operate on RustworkX Graph
+ objects, but there seemed to be no harm in having them work either
+ way. It was also a good proving ground for testing whether the new
+ Graph object could behave like a NetworkX Graph object (in terms of
+ attribute access and syntax).
+
+frm: RX Documentation
+
+Many of the functions in this file operate on subgraphs which are different from
+NX subgraphs because the node_ids change in the subgraph. To deal with this,
+in graph.py we have a _node_id_to_parent_node_id_map data member for Graph objects which maps
+the node_ids in a subgraph to the corresponding node_id in its parent graph. This
+will allow routines operating on subgraphs to return results using the node_ids
+of the parent graph.
+
+Note that for top-level graphs, we still define this _node_id_to_parent_node_id_map, but in
+this case it is an identity map that just maps each node_id to itself. This allows
+code to always translate correctly, even if operating on a top-level graph.
+
+As a matter of coding convention, all calls to graph.subgraph() have been placed
+in the actual parameter list of function calls. This limits the scope of the
+subgraph node_ids to the called function - eliminating the risk of those node_ids
+leaking into surrounding code. Stated differently, this eliminates the cognitive
+load of trying to remember whether a node_id is a parent or a subgraph node_id.
+"""
-from functools import partial
-from inspect import signature
+import itertools
import random
+import warnings
from collections import deque, namedtuple
-import itertools
+from functools import partial
+from inspect import signature
from typing import (
Any,
Callable,
Dict,
+ Hashable,
List,
Optional,
- Set,
- Union,
- Hashable,
Sequence,
+ Set,
Tuple,
+ Union,
)
-import warnings
-
-
-def predecessors(h: nx.Graph, root: Any) -> Dict:
- return {a: b for a, b in nx.bfs_predecessors(h, root)}
-
-def successors(h: nx.Graph, root: Any) -> Dict:
- return {a: b for a, b in nx.bfs_successors(h, root)}
-
-
-def random_spanning_tree(
- graph: nx.Graph, region_surcharge: Optional[Dict] = None
-) -> nx.Graph:
+import networkx as nx
+import networkx.algorithms.tree as nxtree
+import rustworkx as rx
+
+# frm: import the new Graph object which encapsulates NX and RX Graph...
+from .graph import Graph
+
+# frm: TODO: Refactoring: Remove import of networkx and rustworkx once we have moved networkx
+# dependencies out of this file - see comments below on
+# spanning trees.
+
+
+# frm: TODO: Refactoring Remove import of "tree" from networkx.algorithms in this file
+# It is only used to get a spanning tree function:
+#
+# spanning_tree = nxtree.minimum_spanning_tree(
+#
+# There is an RX function that also computes a spanning tree - hopefully
+# it works as we want it to work and hence can be used.
+#
+# I think it probably makes sense to move this spanning tree function
+# into graph.py and to encapsulate the NX vs RX code there.
+#
+# Note Peter agrees with this...
+
+
+# frm TODO: Documentation: Update function param documentation to get rid of nx.Graph and use
+# just Graph
+
+# frm TODO: Documentation: Migration Guide: tree.py is no longer a general purpose module - it is
+# GerryChain specific
+#
+#                           Before the work to integrate RX, many of the routines in tree.py
+# operated on NetworkX Graph objects, which meant that the module
+# was not bound to just GerryChain work - someone could conceivably
+# have used it for a graph oriented project that had nothing to do
+# with GerryChain or redistricting.
+#
+#                           That is no longer true, as the parameters to the routines have
+# been changed to be GerryChain Graph objects which are not subclasses
+# of NetworkX Graph objects.
+
+
+def random_spanning_tree(graph: Graph, region_surcharge: Optional[Dict] = None) -> Graph:
"""
Builds a spanning tree chosen by Kruskal's method using random weights.
- :param graph: The input graph to build the spanning tree from. Should be a Networkx Graph.
- :type graph: nx.Graph
+ :param graph: The input graph to build the spanning tree from.
+ :type graph: Graph
:param region_surcharge: Dictionary of surcharges to add to the random
weights used in region-aware variants.
:type region_surcharge: Optional[Dict], optional
- :returns: The maximal spanning tree represented as a Networkx Graph.
- :rtype: nx.Graph
+ :returns: The maximal spanning tree represented as a GerryChain Graph.
+ :rtype: Graph
+ """
+ # frm: TODO: Performance
+ # This seems to me to be an expensive way to build a random spanning
+ # tree. It calls a routine to compute a "minimal" spanning tree that
+ # computes the total "weight" of the spanning tree and selects the
+    # minimal total weight. By making the weights random, this will select
+ # a different spanning tree each time. This works, but it does not
+ # in any way depend on the optimization.
+ #
+ # Why isn't the uniform_spanning_tree() below adequate? It takes
+ # a random walk at each point to create the spanning tree. This
+ # would seem to be a much cheaper way to calculate a spanning tree.
+ #
+ # What am I missing???
+ #
+    # The region_surcharge allows the caller to tweak the randomness
+ # which might be useful...
+
"""
+ frm: RX Documentation:
+
+ As far as I can tell a spanning tree is only ever used to populate a PopulatedGraph
+ and so, there is no need to worry about translating the spanning tree's nodes into
+ the context of the parent. Stated differently, a spanning tree is not used to
+ compute something about a subgraph but rather to compute something about whatever
+ graph is currently being dealt with.
+
+ In short, I am assuming that we can ignore the fact that RX subgraphs have different
+ node_ids for this function and all will be well...
+ """
+
+ # frm: TODO: Refactoring: WTF is up with region_surcharge being unset? The region_surcharge
+ # is only ever accessed in this routine in the for-loop below to
+ # increase the weight on the edge - setting it to be an empty dict
+ # just prevents the code below from blowing up. Why not just put
+ # a test for the surcharge for-loop alone:
+ #
+    #           if region_surcharge is not None:
+ # for key, value in region_surcharge.items():
+ # ...
+ #
+ # Peter's comments from PR:
+ #
+ # peterrrock2 last week
+ # This is one of mine. I added the region surcharge stuff in an afternoon,
+ # so I probably did this to prevent the more than 3 levels of indentation
+ # and to make the reasoning easier to track as I was adding the feature.
+ #
+ # Collaborator
+ # Author
+ # @peterrrock2 peterrrock2 last week
+ # Also, I imagine that I originally wanted the function modification to look like
+ #
+ # def random_spanning_tree(
+ # graph: Graph,
+ # region_surcharge: dict = dict()
+ # ) -> Graph:
+ #
+ # but doing this sort of thing is generally a bad idea in python since the
+ # dict() is instantiated at import time and then all future calls to the
+ # function reference the same dict when the surcharge is unset. Not a problem
+ # for this function, but the accepted best-practice is to change the above to
+ #
+ # def random_spanning_tree(
+ # graph: Graph,
+ # region_surcharge: Optional[Dict] = None
+ # ) -> Graph:
+ # if region_surcharge is None:
+ # region_surcharge = dict()
+ #
+ # since this doesn't reuse the reference.
+
if region_surcharge is None:
region_surcharge = dict()
- for edge in graph.edges():
+ # Add a random weight to each edge in the graph with the goal of
+ # causing the selection of a different (random) spanning tree based
+ # on those weights.
+ #
+ # If a region_surcharge was passed in, then we want to add additional
+ # weight to edges that cross regions or that have a node that is
+ # not in any region. For example, if we want to keep municipalities
+ # together in the same district, the region_surcharge would contain
+ # an additional weight associated with the key for municipalities (say
+ # "mini") and if an edge went from one municipality to another or if
+    # "muni") and if an edge went from one municipality to another or if
+ # the edge would be given the additional weight (value) associated
+ # with the region_surcharge. This would preference/bias the
+ # spanning_tree algorithm to select other edges... which would have
+ # the effect of prioritizing keeping regions intact.
+
+ # frm: TODO: Documentation: Verify that the comment above about region_surcharge is accurate
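+    #
+    # Illustrative surcharge (hypothetical node attribute "muni"):
+    #
+    #     region_surcharge = {"muni": 1.0}
+    #
+    # Any edge whose endpoints carry different "muni" values (or a missing one)
+    # would get weight += 1.0, biasing the spanning tree away from such edges.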
+
+ # Add random weights to the edges in the graph so that the spanning tree
+ # algorithm will select a different spanning tree each time.
+ #
+ for edge_id in graph.edge_indices:
+ edge = graph.get_edge_from_edge_id(edge_id)
weight = random.random()
+
+ # If there are any entries in the region_surcharge dict, then add
+ # additional weight to the edge for 1) edges that cross region boundaries (one
+ # node is in one region and the other node is in a different region) and 2) edges
+ # where one (or both) of the nodes is not in a region
for key, value in region_surcharge.items():
# We surcharge edges that cross regions and those that are not in any region
if (
- graph.nodes[edge[0]][key] != graph.nodes[edge[1]][key]
- or graph.nodes[edge[0]][key] is None
- or graph.nodes[edge[1]][key] is None
+ graph.node_data(edge[0])[key] != graph.node_data(edge[1])[key]
+ or graph.node_data(edge[0])[key] is None
+ or graph.node_data(edge[1])[key] is None
):
weight += value
- graph.edges[edge]["random_weight"] = weight
+ graph.edge_data(edge_id)["random_weight"] = weight
+
+ # frm: TODO: Refactoring: Code: CROCK: (for the moment)
+ # We need to create a minimum spanning tree but the way to do so
+ # is different for NX and RX. I am sure that there is a more elegant
+ # way to do this, and in any event, this dependence on NX vs RX
+ # should not be in this file, tree.py, but for now, I am just trying
+ # to get this to work, so I am using CROCKS...
+
+ graph.verify_graph_is_valid()
+
+ # frm: TODO: Refactoring: Remove NX / RX dependency - maybe move to graph.py
+
+ # frm: TODO: Documentation: Think a bit about original_nx_node_ids
+ #
+ # Original node_ids refer to the node_ids used when a graph was created.
+ # This mostly means remembering the NX node_ids when you create an RX
+ # based Graph object. In the code below, we create an RX based Graph
+ # object, but we do not do anything to map original node_ids. This is
+ # probably OK, but it depends on how the spanning tree is used elsewhere.
+ #
+ # In short, worth some thought...
+
+ if graph.is_nx_graph():
+ nx_graph = graph.get_nx_graph()
+ spanning_tree = nxtree.minimum_spanning_tree(
+ nx_graph, algorithm="kruskal", weight="random_weight"
+ )
+        spanning_graph = Graph.from_networkx(spanning_tree)
+ elif graph.is_rx_graph():
+ rx_graph = graph.get_rx_graph()
- spanning_tree = tree.minimum_spanning_tree(
- graph, algorithm="kruskal", weight="random_weight"
- )
- return spanning_tree
+ def get_weight(edge_data):
+            # Given the data dict for an edge, return its "random_weight" value.
+ return edge_data["random_weight"]
+
+ spanning_tree = rx.minimum_spanning_tree(rx_graph, get_weight)
+        spanning_graph = Graph.from_rustworkx(spanning_tree)
+ else:
+ raise Exception("random_spanning_tree - bad kind of graph object")
+    return spanning_graph
-def uniform_spanning_tree(
- graph: nx.Graph, choice: Callable = random.choice
-) -> nx.Graph:
+
+def uniform_spanning_tree(graph: Graph, choice: Callable = random.choice) -> Graph:
"""
Builds a spanning tree chosen uniformly from the space of all
spanning trees of the graph. Uses Wilson's algorithm.
- :param graph: Networkx Graph
- :type graph: nx.Graph
+ :param graph: Graph
+ :type graph: Graph
:param choice: :func:`random.choice`. Defaults to :func:`random.choice`.
:type choice: Callable, optional
:returns: A spanning tree of the graph chosen uniformly at random.
- :rtype: nx.Graph
+ :rtype: Graph
"""
- root = choice(list(graph.node_indices))
- tree_nodes = set([root])
- next_node = {root: None}
- for node in graph.node_indices:
- u = node
+ """
+    frm: RX Documentation:
+
+ As with random_spanning_tree, I am assuming that the issue of RX subgraphs having
+ different node_ids is not an issue for this routine...
+ """
+ # Pick a starting point at random
+ root_id = choice(list(graph.node_indices))
+ tree_nodes = set([root_id])
+ next_node_id = {root_id: None}
+
+ # frm: I think that this builds a tree bottom up. It takes
+ # every node in the graph (in sequence). If the node
+ # is already in the list of nodes that have been seen
+ # which means it has a neighbor registered as a next_node,
+ # then it is skipped. If this node does not yet have
+ # a neighbor registered, then it is given one, and
+ # that neighbor becomes the next node looked at.
+ #
+ # This essentially takes a node and travels "up" until
+ # it finds a node that is already in the tree. Multiple
+ # nodes can end up with the same "next_node" - which
+ # in tree-speak means that next_node is the parent of
+ # all of the nodes that end on it.
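+    #
+    # frm: Tiny illustration (hypothetical 3-node path 0-1-2 with root_id == 0):
+    #      the walk from a non-tree node follows random neighbors until it hits
+    #      the tree, so it might record next_node_id == {1: 0, 2: 1}; the second
+    #      while-loop then follows those links and adds 1 and 2 to tree_nodes.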
+
+ for node_id in graph.node_indices:
+ u = node_id
while u not in tree_nodes:
- next_node[u] = choice(list(graph.neighbors(u)))
- u = next_node[u]
+ next_node_id[u] = choice(list(graph.neighbors(u)))
+ u = next_node_id[u]
- u = node
+ u = node_id
while u not in tree_nodes:
tree_nodes.add(u)
- u = next_node[u]
+ u = next_node_id[u]
+
+ # frm DONE: To support RX, I added an add_edge() method to Graph.
- G = nx.Graph()
- for node in tree_nodes:
- if next_node[node] is not None:
- G.add_edge(node, next_node[node])
+ # frm: TODO: Refactoring: Remove dependency on NX below
+
+ nx_graph = nx.Graph()
+ G = Graph.from_networkx(nx_graph)
+
+ for node_id in tree_nodes:
+ if next_node_id[node_id] is not None:
+ G.add_edge(node_id, next_node_id[node_id])
return G
+# frm TODO: Documentation: PopulatedGraph - state that this only exists in tree.py
+#
+# I think that this is only ever used inside this module (except)
+# for testing.
+#
+# Decide if this is intended to only ever be used inside tree.py (and for testing),
+# and if so: 1) document that fact and 2) see if there is any Pythonic convention
+# for a class that is intended to NOT be used externally (like a leading underscore)
+#
class PopulatedGraph:
"""
A class representing a graph with population information.
:ivar graph: The underlying graph structure.
- :type graph: nx.Graph
+ :type graph: Graph
:ivar subsets: A dictionary mapping nodes to their subsets.
:type subsets: Dict
:ivar population: A dictionary mapping nodes to their populations.
@@ -153,14 +385,14 @@ class PopulatedGraph:
def __init__(
self,
- graph: nx.Graph,
+ graph: Graph,
populations: Dict,
ideal_pop: Union[float, int],
epsilon: float,
) -> None:
"""
:param graph: The underlying graph structure.
- :type graph: nx.Graph
+ :type graph: Graph
:param populations: A dictionary mapping nodes to their populations.
:type populations: Dict
:param ideal_pop: The ideal population for each district.
@@ -170,15 +402,39 @@ def __init__(
:type epsilon: float
"""
self.graph = graph
- self.subsets = {node: {node} for node in graph.nodes}
+ self.subsets = {node_id: {node_id} for node_id in graph.node_indices}
self.population = populations.copy()
self.tot_pop = sum(self.population.values())
self.ideal_pop = ideal_pop
self.epsilon = epsilon
- self._degrees = {node: graph.degree(node) for node in graph.nodes}
+ self._degrees = {node_id: graph.degree(node_id) for node_id in graph.node_indices}
+
+ # frm: TODO: Refactor: _degrees ??? Why separately store the degree of every node?
+ #
+ # The _degrees data member above is used to define a method below called "degree()"
+ # What is odd is that the implementation of this degree() method could just as
+ # easily have been self.graph.degree(node_id). And in fact, every call on the
+    #      new degree function could be replaced with just <populated_graph>.graph.degree(node_id)
+    #
+    #      So unless there is a big performance gain (or some other reason), I would be
+    #      in favor of deleting the degree() method below and just using
+    #      <populated_graph>.graph.degree(node_id) on the assumption that both NX and RX
+ # have an efficient implementation of degree()...
def __iter__(self):
- return iter(self.graph)
+ # Note: in the pre RustworkX code, this was implemented as:
+ #
+ # return iter(self.graph)
+ #
+ # But RustworkX does not support __iter__() - it is not iterable.
+ #
+ # The way to do this in the new RustworkX based code is to use
+ # the node_indices() method which is accessed as a property as in:
+ #
+ # for node_id in graph.node_indices:
+ # ...do something with the node_id
+ #
+ raise NotImplementedError("Graph is not iterable - use graph.node_indices instead")
def degree(self, node) -> int:
return self._degrees[node]
@@ -188,6 +444,8 @@ def contract_node(self, node, parent) -> None:
self.subsets[parent] |= self.subsets[node]
self._degrees[parent] -= 1
+ # frm: only ever used inside this file
+ # But maybe this is intended to be used externally...
def has_ideal_population(self, node, one_sided_cut: bool = False) -> bool:
"""
Checks if a node has an ideal population within the graph up to epsilon.
@@ -204,11 +462,25 @@ def has_ideal_population(self, node, one_sided_cut: bool = False) -> bool:
:returns: True if the node has an ideal population within the graph up to epsilon.
:rtype: bool
"""
+
+ # frm: TODO: Refactoring: Create a helper function for this
+ #
+ # This logic is repeated several times in this file. Consider
+ # refactoring the code so that the logic lives in exactly
+ # one place.
+ #
+ # When thinking about refactoring, consider whether it makes
+ # sense to toggle what this routine does by the "one_sided_cut"
+ # parameter. Why not have two separate routines with
+ # similar but distinguishing names. I need to be absolutely
+ # clear about what the two cases are all about, but my current
+ # hypothesis is that when one_sided_cut == False, we are looking
+ # for the edge which when cut produces two districts of
+ # approximately equal size - so a bisect rather than a find all
+ # meaning...
+
if one_sided_cut:
- return (
- abs(self.population[node] - self.ideal_pop)
- < self.epsilon * self.ideal_pop
- )
+ return abs(self.population[node] - self.ideal_pop) < self.epsilon * self.ideal_pop
return (
abs(self.population[node] - self.ideal_pop) <= self.epsilon * self.ideal_pop
@@ -217,9 +489,7 @@ def has_ideal_population(self, node, one_sided_cut: bool = False) -> bool:
)
def __repr__(self) -> str:
- graph_info = (
- f"Graph(nodes={len(self.graph.nodes)}, edges={len(self.graph.edges)})"
- )
+ graph_info = f"Graph(nodes={len(self.graph.node_indices)}, edges={len(self.graph.edges)})"
return (
f"{self.__class__.__name__}("
f"graph={graph_info}, "
@@ -229,15 +499,35 @@ def __repr__(self) -> str:
)
+# frm: ???: Is a Cut used anywhere outside this file?
+
+# Definition of Cut namedtuple
# Tuple that is used in the find_balanced_edge_cuts function
Cut = namedtuple("Cut", "edge weight subset")
Cut.__new__.__defaults__ = (None, None, None)
Cut.__doc__ = "Represents a cut in a graph."
Cut.edge.__doc__ = "The edge where the cut is made. Defaults to None."
Cut.weight.__doc__ = "The weight assigned to the edge (if any). Defaults to None."
-Cut.subset.__doc__ = (
- "The (frozen) subset of nodes on one side of the cut. Defaults to None."
-)
+Cut.subset.__doc__ = "The (frozen) subset of nodes on one side of the cut. Defaults to None."
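+
+# Illustrative construction (hypothetical values):
+#
+#     Cut(edge=(3, 7), weight=0.42, subset=frozenset({3, 4, 5}))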
+
+# frm: TODO: Documentation: Document what Cut objects are used for
+#
+# Not sure how this is used, and so I do not know whether it needs
+# to translate node_ids to the parent_node_id context. I am assuming not...
+#
+# Here is an example of how it is used (in test_tree.py):
+#
+# method=partial(
+# bipartition_tree,
+# max_attempts=10000,
+# balance_edge_fn=find_balanced_edge_cuts_contraction,
+#
+# and another in the same test file:
+#
+# populated_tree = PopulatedGraph(
+# tree, {node: 1 for node in tree}, len(tree) / 2, 0.5
+# )
+# cuts = find_balanced_edge_cuts_contraction(populated_tree)
def find_balanced_edge_cuts_contraction(
@@ -261,27 +551,56 @@ def find_balanced_edge_cuts_contraction(
:rtype: List[Cut]
"""
- root = choice([x for x in h if h.degree(x) > 1])
+ root = choice([node_id for node_id in h.graph.node_indices if h.degree(node_id) > 1])
# BFS predecessors for iteratively contracting leaves
- pred = predecessors(h.graph, root)
+ pred = h.graph.predecessors(root)
cuts = []
- leaves = deque(x for x in h if h.degree(x) == 1)
+
+ # frm: Work up from leaf nodes to find subtrees with the "correct"
+    #      population.  The algorithm starts with real leaf nodes, but
+ # if a node does not have the "correct" population, then that
+ # node is merged (contracted) into its parent, effectively
+ # creating another leaf node which is then added to the end
+ # of the queue.
+ #
+ # In this way, we calculate the total population of subtrees
+ # by going bottom up, until we find a subtree that has the
+ # "correct" population for a cut.
+
+ # frm: ??? Note that there is at least one other routine in this file
+ # that does something similar (perhaps exactly the same).
+ # Need to figure out why there are more than one way to do this...
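+    #
+    # frm: Sketch (hypothetical): in a path tree 0-1-2 rooted at node 1, with
+    #      unit populations, ideal_pop == 1, epsilon > 0, and one_sided_cut
+    #      set to True, leaves 0 and 2 each satisfy has_ideal_population, so
+    #      edges (0, 1) and (2, 1) both become Cuts with subsets {0} and {2}.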
+
+ leaves = deque(node_id for node_id in h.graph.node_indices if h.degree(node_id) == 1)
while len(leaves) > 0:
leaf = leaves.popleft()
if h.has_ideal_population(leaf, one_sided_cut=one_sided_cut):
+ # frm: If the population of the subtree rooted in this node is the correct
+ # size, then add it to the cut list. Note that if one_sided_cut == False,
+ # then the cut means the cut bisects the partition (frm: ??? need to verify
+ # this).
e = (leaf, pred[leaf])
cuts.append(
Cut(
edge=e,
- weight=h.graph.edges[e].get("random_weight", random.random()),
+ weight=h.graph.edge_data(h.graph.get_edge_id_from_edge(e)).get(
+ "random_weight", random.random()
+ ),
subset=frozenset(h.subsets[leaf].copy()),
)
)
- # Contract the leaf:
+ # Contract the leaf: frm: merge the leaf's population into the parent and add the
+ # parent to "leaves"
parent = pred[leaf]
+ # frm: Add child population and subsets to parent, reduce parent's degree by 1
+ # This effectively removes the leaf from the tree, adding all of its data
+ # to the parent.
h.contract_node(leaf, parent)
if h.degree(parent) == 1 and parent != root:
+ # frm: Only add the parent to the end of the queue when we are merging
+ # the last leaf - this makes sure we only add the parent node to
+ # the queue one time...
leaves.append(parent)
return cuts
@@ -301,6 +620,18 @@ def _calc_pops(succ, root, h):
:returns: A dictionary mapping nodes to their subtree populations.
:rtype: Dict
"""
+ # frm: This took me a while to sort out what was going on.
+ # Conceptually it is easy - given a tree anchored in a root node,
+ # calculate the population in each subtree going bottom-up.
+ # The stack (deque) provides the mechanism for going bottom-up.
+ # On the way down, you just put nodes in the stack (append is like
+ # push() which seems odd to me, but whatever...) then on the way back
+ # up, you add the totals for each child to your own population and
+ # presto you have the total population for each subtree...
+ #
+ # For this to work, you just need to have a list of nodes with
+ # their successors associated with them...
+ #
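+    # frm: Worked example (hypothetical): with succ == {0: [1, 2], 1: [], 2: []}
+    #      rooted at 0 and populations {0: 1, 1: 2, 2: 3}, the result would be
+    #      subtree_pops == {1: 2, 2: 3} - each non-root node mapped to the
+    #      total population of the subtree hanging from it.
+    #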
subtree_pops: Dict[Any, Union[int, float]] = {}
stack = deque(n for n in succ[root])
while stack:
@@ -322,6 +653,7 @@ def _calc_pops(succ, root, h):
return subtree_pops
+# frm: Only used in one function and only in this module...
def _part_nodes(start, succ):
"""
Partitions the nodes of a graph into two sets.
@@ -335,6 +667,39 @@ def _part_nodes(start, succ):
:returns: A set of nodes for a particular district (only one side of the cut).
:rtype: Set
"""
+
+ """
+ frm: Compute the nodes in a subtree defined by a Cut.
+
+ This routine computes the set of nodes in a subtree rooted in the
+ node identified by "start" in the tree defined by "succ".
+
+ As such it is highly dependent on context and is not generally
+ useful. That is, it is essentially just a way to refactor some
+ code used in a couple of places so that the logic in the code is
+ in one place instead of several.
+
+ To be specific, Cuts are always relative to a specific tree for
+ a partition. This tree is a "spanning tree" that converts the
+ graph into a DAG. Cuts are then computed by finding subtrees
+ of that DAG that have the appropriate population (this could
+ presumably be modified to include other factors).
+
+ When a Cut is created, we want to collect all of the nodes that
+ are in the subtree, and this is what this routine does. It
+ merely starts at the root of the subtree (start) and goes down
+ the subtree, adding each node to a set.
+
+    frm: TODO: Documentation: Rename this to be more descriptive - perhaps
+ something like: _nodes_in_subtree() or
+ _nodes_for_cut()
+
+    frm: TODO: Documentation: Add the above explanation of what a Cut is (and how
+                              we find them by converting the graph to a DAG and
+                              then looking for subtrees) to a block header at the
+                              top of this file.  It will give the reader some
+                              idea wtf is going on...  ;-)
+ """
nodes = set()
queue = deque([start])
while queue:
@@ -348,6 +713,7 @@ def _part_nodes(start, succ):
return nodes
+# frm: used externally by tree_proposals.py
def find_balanced_edge_cuts_memoization(
h: PopulatedGraph, one_sided_cut: bool = False, choice: Callable = random.choice
) -> List[Cut]:
@@ -374,11 +740,37 @@ def find_balanced_edge_cuts_memoization(
:rtype: List[Cut]
"""
- root = choice([x for x in h if h.degree(x) > 1])
- pred = predecessors(h.graph, root)
- succ = successors(h.graph, root)
+ """
+ frm: ???: confused...
+
+ This function seems to be used for two very different purposes, depending on the
+ value of the parameter, one_sided_cut. When true, the code looks for lots of cuts
+ that would create a district with the right population - both above and below the
+ node being considered. Given that it is operating on a tree, one would assume that
+ there is only one (or perhaps two if one node's population was tiny) cut for the top
+ of the tree, but there should be many for the bottom of the tree.
+
+ However, if the paramter is set to false (the default), then the code checks to see
+ whether a cut would produce two districts - on above and one below the tree that
+ have the right populations. In this case, the code is presumatly looking for the
+ single node (again there might be two if one node's population was way below epsilon)
+ that would bisect the graph into two districts with a tolerable population.
+
+ If I am correct, then there is an opportunity to clarify these two uses - perhaps
+ with wrapper functions. I am also a bit surprised that snippets of code are repeated.
+ Again - this causes mental load for the reader, and it is an opportunity for bugs to
+ creep in later (you fix it in one place but not the other). Not sure this "clarification"
+ is desired, but it is worth considering...
+ """
+
+ # frm: ???: Why does a root have to have degree > 1? I would think that any node would do...
+
+ root = choice([node_id for node_id in h.graph.node_indices if h.degree(node_id) > 1])
+ pred = h.graph.predecessors(root)
+ succ = h.graph.successors(root)
total_pop = h.tot_pop
+ # Calculate the population of each subtree in the "succ" tree
subtree_pops = _calc_pops(succ, root, h)
cuts = []
@@ -386,44 +778,71 @@ def find_balanced_edge_cuts_memoization(
if one_sided_cut:
for node, tree_pop in subtree_pops.items():
if abs(tree_pop - h.ideal_pop) <= h.ideal_pop * h.epsilon:
- e = (node, pred[node])
+ # frm: If the subtree for this node has a population within epsilon
+ # of the ideal, then add it to the cuts list.
+ e = (node, pred[node]) # get the edge from the parent to this node
wt = random.random()
+ # frm: Add the cut - use the edge's stored "random_weight" if it has
+ # one (else the fresh random weight wt) and remember all of
+ # the nodes in the subtree in the frozenset
cuts.append(
Cut(
edge=e,
- weight=h.graph.edges[e].get("random_weight", wt),
+ weight=h.graph.edge_data(h.graph.get_edge_id_from_edge(e)).get(
+ "random_weight", wt
+ ),
subset=frozenset(_part_nodes(node, succ)),
)
)
elif abs((total_pop - tree_pop) - h.ideal_pop) <= h.ideal_pop * h.epsilon:
+ # frm: If the population of everything ABOVE this node in the tree is
+ # within epsilon of the ideal, then add it to the cut list too.
e = (node, pred[node])
wt = random.random()
cuts.append(
Cut(
edge=e,
- weight=h.graph.edges[e].get("random_weight", wt),
- subset=frozenset(set(h.graph.nodes) - _part_nodes(node, succ)),
+ weight=h.graph.edge_data(h.graph.get_edge_id_from_edge(e)).get(
+ "random_weight", wt
+ ),
+ subset=frozenset(set(h.graph.node_indices) - _part_nodes(node, succ)),
)
)
return cuts
+ # frm: TODO: Refactoring: Restructure this code to make its two use cases clearer:
+ #
+ # One use case is bisecting the graph (one_sided_cut is False). The
+ # other use case is to peel off one part (district) with the appropriate
+ # population.
+ #
+ # Not quite clear yet exactly how to do this, but a return stmt in the middle
+ # of the routine (above) is a clear sign that something is odd. Perhaps
+ # we keep the existing function signature but immediately split the code
+ # into calls on two separate routines - one for each use case.
+
+ # We are looking for a way to bisect the graph (one_sided_cut is False)
for node, tree_pop in subtree_pops.items():
+
if (abs(tree_pop - h.ideal_pop) <= h.ideal_pop * h.epsilon) and (
abs((total_pop - tree_pop) - h.ideal_pop) <= h.ideal_pop * h.epsilon
):
e = (node, pred[node])
wt = random.random()
+ # frm: TODO: Performance: Consider whether the code below can be made faster...
cuts.append(
Cut(
edge=e,
- weight=h.graph.edges[e].get("random_weight", wt),
- subset=frozenset(set(h.graph.nodes) - _part_nodes(node, succ)),
+ weight=h.graph.edge_data(h.graph.get_edge_id_from_edge(e)).get(
+ "random_weight", wt
+ ),
+ subset=frozenset(set(h.graph.node_indices) - _part_nodes(node, succ)),
)
)
return cuts
+# frm: only used in this file and in a test
class BipartitionWarning(UserWarning):
"""
Generally raised when it is proving difficult to find a balanced cut.
@@ -432,6 +851,7 @@ class BipartitionWarning(UserWarning):
pass
+# frm: only used in this file and in a test
class ReselectException(Exception):
"""
Raised when the tree-splitting algorithm is unable to find a
@@ -477,19 +897,28 @@ def _max_weight_choice(cut_edge_list: List[Cut]) -> Cut:
if not isinstance(cut_edge_list[0], Cut) or cut_edge_list[0].weight is None:
return random.choice(cut_edge_list)
+ # frm: ???: this strikes me as possibly expensive. Computing the
+ # max in a list is O(N) so not terrible, but this
+ # might be called lots of times (need to know more about
+ # how it is used). Would it make sense to have the
+ # cut_edge_list sorted before it is frozen? I think it
+ # is now a set, so it would need to be a list... Not
+ # urgent, but worth looking into at some point...
+ #
return max(cut_edge_list, key=lambda cut: cut.weight)
+# frm: TODO: Documentation: document what _power_set_sorted_by_size_then_sum() does.
+#
+# From the code below: given a dict d, build every non-empty subset of d's keys,
+# then sort the subsets in descending order - first by subset size, then by the
+# sum of the corresponding values. There is no NX/RX issue here; I just
+# don't yet know why it is needed...
+# Note that this is only ever used once...
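+#
+# frm: Worked example (illustrative):
+#
+#     _power_set_sorted_by_size_then_sum({"a": 1, "b": 3})
+#     # -> [("a", "b"), ("b",), ("a",)]
+#
+# i.e. every non-empty subset of the keys, largest subsets first, ties broken
+# by the larger sum of the corresponding values.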
def _power_set_sorted_by_size_then_sum(d):
- power_set = [
- s for i in range(1, len(d) + 1) for s in itertools.combinations(d.keys(), i)
- ]
+ power_set = [s for i in range(1, len(d) + 1) for s in itertools.combinations(d.keys(), i)]
# Sort the subsets in descending order based on
# the sum of their corresponding values in the dictionary
- sorted_power_set = sorted(
- power_set, key=lambda s: (len(s), sum(d[i] for i in s)), reverse=True
- )
+ sorted_power_set = sorted(power_set, key=lambda s: (len(s), sum(d[i] for i in s)), reverse=True)
return sorted_power_set
@@ -501,6 +930,8 @@ def _power_set_sorted_by_size_then_sum(d):
def _region_preferred_max_weight_choice(
populated_graph: PopulatedGraph, region_surcharge: Dict, cut_edge_list: List[Cut]
) -> Cut:
+ # frm: ???: There is no NX/RX dependency in this routine, but I do
+ # not yet understand what it does or why...
"""
This function is used in the case of a region-aware chain. It
is similar to the as :meth:`_max_weight_choice` function except
@@ -551,9 +982,24 @@ def _region_preferred_max_weight_choice(
# Prepare data for efficient access
edge_region_info = {
cut: {
+ # frm: This code is a bit dense (at least for me).
+ # Given a cut_edge_list (whose elements have an
+ # attribute, "edge"), construct a dict that
+ # associates with each "cut" the values of the
+ # region_surcharge attributes for both nodes
+ # in the edge.
+ #
+ # So, if the region_surcharge dict were
+ # {"muni": 0.2, "water": 0.8}, then for
+ # each cut, cut_n, there would be a
+ # dict value that looked like:
+ # {"muni": ("siteA", "siteA"),
+ # "water": ("water1", "water2")
+ # }
+ #
key: (
- populated_graph.graph.nodes[cut.edge[0]].get(key),
- populated_graph.graph.nodes[cut.edge[1]].get(key),
+ populated_graph.graph.node_data(cut.edge[0]).get(key),
+ populated_graph.graph.node_data(cut.edge[1]).get(key),
)
for key in region_surcharge
}
@@ -578,13 +1024,44 @@ def _region_preferred_max_weight_choice(
return _max_weight_choice(cut_edge_list)
+# frm TODO: Refactoring: def bipartition_tree(
+#
+# This might get complicated depending on what kinds of functions
+# are used as parameters. That is, do the functions used as parameters
+# assume they are working with an NX graph?
+#
+# I think all of the functions used as parameters have been converted
+# to work on the new Graph object, but perhaps end users have created
+# their own? Should probably add logic to verify that the
+# functions are not written to be operating on an NX Graph. Not sure
+# how to do that though...
+#
+# Peter's comments from PR:
+#
+# Users do sometimes write custom spanning tree and cut edge functions. My
+# recommendation would be to make this simple for now. Have a list of "RX_compatible"
+# functions and then have the MarkovChain class do some coercion to store an
+# appropriate graph and partition object at initialization. We always expect
+# the workflow to be something like
+#
+# Graph -> Partition -> MarkovChain
+#
+# But we do copy operations in each step, so I wouldn't expect any weird
+# side-effects from pushing the determination of what graph type to use
+# off onto the MarkovChain class
+
+# frm: used in this file and in tree_proposals.py
+# But maybe this is intended to be used externally...
+#
+
+
def bipartition_tree(
- graph: nx.Graph,
+ subgraph_to_split: Graph,
pop_col: str,
pop_target: Union[int, float],
epsilon: float,
node_repeats: int = 1,
- spanning_tree: Optional[nx.Graph] = None,
+ spanning_tree: Optional[Graph] = None,
spanning_tree_fn: Callable = random_spanning_tree,
region_surcharge: Optional[Dict] = None,
balance_edge_fn: Callable = find_balanced_edge_cuts_memoization,
@@ -595,6 +1072,10 @@ def bipartition_tree(
allow_pair_reselection: bool = False,
cut_choice: Callable = _region_preferred_max_weight_choice,
) -> Set:
+ # frm: TODO: Refactoring: Change the names of ALL function formal parameters to end
+ # in "_fn" - to make it clear that the paraemter is a function. This will make it
+ # easier to do a global search to find all function parameters - as well as just being
+ # good coding practice...
"""
This function finds a balanced 2 partition of a graph by drawing a
spanning tree and finding an edge to cut that leaves at most an epsilon
@@ -605,7 +1086,7 @@ def bipartition_tree(
is ``epsilon * pop_target`` away from ``pop_target``.
:param graph: The graph to partition.
- :type graph: nx.Graph
+ :type graph: Graph
:param pop_col: The node attribute holding the population of each node.
:type pop_col: str
:param pop_target: The target population for the returned subset of nodes.
@@ -618,7 +1099,7 @@ def bipartition_tree(
:type node_repeats: int, optional
:param spanning_tree: The spanning tree for the algorithm to use (used when the
algorithm chooses a new root and for testing).
- :type spanning_tree: Optional[nx.Graph], optional
+ :type spanning_tree: Optional[Graph], optional
:param spanning_tree_fn: The random spanning tree algorithm to use if a spanning
tree is not provided. Defaults to :func:`random_spanning_tree`.
:type spanning_tree_fn: Callable, optional
@@ -661,40 +1142,112 @@ def bipartition_tree(
given by ``max_attempts``.
"""
# Try to add the region-aware in if the spanning_tree_fn accepts a surcharge dictionary
+ # frm ???: REALLY??? You are going to change the semantics of your program based on
+ # a function argument's signature? What if someone refactors the code to have
+ # different names??? *sigh*
+ #
+ # A better strategy would be to lock in the function signature for ALL spanning_tree
+ # functions and then just have the region_surcharge parameter not be used in some of them...
+ #
+ # Same with "one_sided_cut"
+ #
+ # Oh - and change "one_sided_cut" to be something a little more intuitive. I have to
+ # reset my mind every time I see it to figure out whether it means to split into
+ # two districts or just peel off one district... *sigh* Before doing this, check to
+ # see if "one_sided_cut" is a term of art that might make sense to some set of experts...
+ #
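+ # frm: For reference, this is how the inspection below behaves (illustrative):
+ #
+ #     from inspect import signature
+ #
+ #     def f(graph, region_surcharge=None):
+ #         pass
+ #
+ #     "region_surcharge" in signature(f).parameters   # -> True
+ #     "one_sided_cut" in signature(f).parameters      # -> False
+ #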
if "region_surcharge" in signature(spanning_tree_fn).parameters:
spanning_tree_fn = partial(spanning_tree_fn, region_surcharge=region_surcharge)
if "one_sided_cut" in signature(balance_edge_fn).parameters:
balance_edge_fn = partial(balance_edge_fn, one_sided_cut=one_sided_cut)
- populations = {node: graph.nodes[node][pop_col] for node in graph.node_indices}
+ # dict of node_id: population for the nodes in the subgraph
+ populations = {
+ node_id: subgraph_to_split.node_data(node_id)[pop_col]
+ for node_id in subgraph_to_split.node_indices
+ }
+
+ # frm: TODO: Debugging: Remove debugging code
+ # print(" ")
+ # print(f"bipartition_tree(): Entering...")
+ # print(f"bipartition_tree(): balance_edge_fn is: {balance_edge_fn}")
+ # print(f"bipartition_tree(): spanning_tree_fn is: {spanning_tree_fn}")
+ # print(f"bipartition_tree(): populations in subgraph are: {populations}")
possible_cuts: List[Cut] = []
if spanning_tree is None:
- spanning_tree = spanning_tree_fn(graph)
+ spanning_tree = spanning_tree_fn(subgraph_to_split)
+
+ # print(" ")
+ # print(f"bipartition_tree(): subgraph edges: {subgraph_to_split.edges}")
+ # print(f"bipartition_tree(): initial spanning_tree edges: {spanning_tree.edges}")
restarts = 0
attempts = 0
while max_attempts is None or attempts < max_attempts:
if restarts == node_repeats:
- spanning_tree = spanning_tree_fn(graph)
+ spanning_tree = spanning_tree_fn(subgraph_to_split)
+ # print(f"bipartition_tree(): new spanning_tree edges: {spanning_tree.edges}")
restarts = 0
h = PopulatedGraph(spanning_tree, populations, pop_target, epsilon)
+ # frm: TODO: Refactoring: Again - we should NOT be changing semantics based
+ # on the names in signatures...
+ # Better approach is to have all of the possible parameters exist
+ # in ALL of the versions of the cut_choice() functions and to
+ # have them default to None if not used by one of the functions.
+ # Then this code could just pass in the values to the
+ # cut_choice function, and it could make sense of what to do.
+ #
+ # This makes it clear what the overall and comprehensive purpose
+ # of cut_choice functions is. This centralizes the knowledge
+ # of what a cut_choice() function is supposed to do - or at least
+ # it prompts the programmer to document that a param in the
+ # general scheme does not apply in a given instance.
+ #
+ # I realize that this is perhaps not "pythonic" - in that it
+ # forces the programmer to document overall behavior instead
+ # of just finding a convenient way to sneak in something new.
+ # However, when code gets complicated, sneaky/clever code
+ # is just not worth it - better to have each change be a little
+ # more painful (needing to change the function signature for
+ # all instances of a generic function to add new functionality
+ # that is only needed by one new instance). This provides
+ # a natural place (in comments of the generic function instances)
+ # to describe what is going on - and it alerts programmers
+ # that a given generic function has perhaps many different
+ # instances - but that they all share the same high level
+ # responsibility.
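+ #
+ # frm: A minimal sketch of that uniform-signature alternative
+ # (hypothetical names, not part of this change):
+ #
+ #     def max_weight_cut_choice(cut_edge_list, populated_graph=None,
+ #                               region_surcharge=None):
+ #         # non-region-aware instance; ignores the extra parameters
+ #         return max(cut_edge_list, key=lambda cut: cut.weight)
+ #
+ # The call site could then always pass all three arguments and drop the
+ # signature inspection below.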
+
is_region_cut = (
"region_surcharge" in signature(cut_choice).parameters
and "populated_graph" in signature(cut_choice).parameters
)
+ # frm: Find one or more edges in the spanning tree that, if cut, would
+ # result in a subtree with the appropriate population.
+
# This returns a list of Cut objects with attributes edge and subset
possible_cuts = balance_edge_fn(h, choice=choice)
+ # frm: TODO: Debugging: Remove debugging code below
+ # print(f"bipartition_tree(): possible_cuts = {possible_cuts}")
+
+ # frm: RX Subgraph
if len(possible_cuts) != 0:
+ chosen_cut = None
if is_region_cut:
- return cut_choice(h, region_surcharge, possible_cuts).subset
-
- return cut_choice(possible_cuts).subset
+ chosen_cut = cut_choice(h, region_surcharge, possible_cuts)
+ else:
+ chosen_cut = cut_choice(possible_cuts)
+ translated_nodes = subgraph_to_split.translate_subgraph_node_ids_for_set_of_nodes(
+ chosen_cut.subset
+ )
+ # print(f"bipartition_tree(): translated_nodes = {translated_nodes}")
+ # frm: Not sure if it is important that the returned set be a frozenset...
+ return frozenset(translated_nodes)
restarts += 1
attempts += 1
@@ -719,23 +1272,33 @@ def bipartition_tree(
def _bipartition_tree_random_all(
- graph: nx.Graph,
+ #
+ # Note: Complexity Alert... _bipartition_tree_random_all does NOT translate node_ids to parent
+ #
+ # Unlike many/most of the routines in this module, _bipartition_tree_random_all() does
+ # not translate node_ids into the IDs of the parent, because calls to it are not made
+ # on subgraphs. That is, it returns possible Cuts using the same node_ids as the parent.
+ # It is up to the caller to translate node_ids (if appropriate).
+ #
+ graph_to_split: Graph,
pop_col: str,
pop_target: Union[int, float],
epsilon: float,
node_repeats: int = 1,
repeat_until_valid: bool = True,
- spanning_tree: Optional[nx.Graph] = None,
+ spanning_tree: Optional[Graph] = None,
spanning_tree_fn: Callable = random_spanning_tree,
balance_edge_fn: Callable = find_balanced_edge_cuts_memoization,
choice: Callable = random.choice,
max_attempts: Optional[int] = 100000,
-) -> List[Tuple[Hashable, Hashable]]:
+) -> List[
+ Tuple[Hashable, Hashable]
+]: # frm: TODO: Documentation: Change this to be a set of node_ids (ints)
"""
Randomly bipartitions a tree into two subgraphs until a valid bipartition is found.
:param graph: The input graph.
- :type graph: nx.Graph
+ :type graph: Graph
:param pop_col: The name of the column in the graph nodes that contains the population data.
:type pop_col: str
:param pop_target: The target population for each subgraph.
@@ -750,7 +1313,7 @@ def _bipartition_tree_random_all(
:type repeat_until_valid: bool, optional
:param spanning_tree: The spanning tree to use for bipartitioning. If None, a random spanning
tree will be generated. Defaults to None.
- :type spanning_tree: Optional[nx.Graph], optional
+ :type spanning_tree: Optional[Graph], optional
:param spanning_tree_fn: The function to generate a spanning tree. Defaults to
random_spanning_tree.
:type spanning_tree_fn: Callable, optional
@@ -770,18 +1333,22 @@ def _bipartition_tree_random_all(
attempts.
"""
- populations = {node: graph.nodes[node][pop_col] for node in graph.node_indices}
+ # dict of node_id: population for the nodes in the subgraph
+ populations = {
+ node_id: graph_to_split.node_data(node_id)[pop_col]
+ for node_id in graph_to_split.node_indices
+ }
possible_cuts = []
if spanning_tree is None:
- spanning_tree = spanning_tree_fn(graph)
+ spanning_tree = spanning_tree_fn(graph_to_split)
restarts = 0
attempts = 0
while max_attempts is None or attempts < max_attempts:
if restarts == node_repeats:
- spanning_tree = spanning_tree_fn(graph)
+ spanning_tree = spanning_tree_fn(graph_to_split)
restarts = 0
h = PopulatedGraph(spanning_tree, populations, pop_target, epsilon)
possible_cuts = balance_edge_fn(h, choice=choice)
@@ -795,14 +1362,130 @@ def _bipartition_tree_random_all(
raise RuntimeError(f"Could not find a possible cut after {max_attempts} attempts.")
+# frm: used in this file and in tree_proposals.py
+# But maybe this is intended to be used externally...
+
+
+#######################
+# frm: Note: This routine is EXACTLY the same as bipartition_tree_random() except
+# that, in addition to the nodes for a new district, it returns the
+# number of possible new districts. This additional information
+# is needed by reversible_recom(), but I did not want to change the
+# function signature of bipartition_tree_random() in case it is used
+# as part of the public API by someone.
+#
+# It is bad form to have two functions that are the same except for
+# a tweak - an invitation for future bugs when you fix something in
+# one place and not the other, so maybe this is something we should
+# revisit when we decide a general code cleanup is in order...
+#
+def bipartition_tree_random_with_num_cuts(
+ graph: Graph,
+ pop_col: str,
+ pop_target: Union[int, float],
+ epsilon: float,
+ node_repeats: int = 1,
+ repeat_until_valid: bool = True,
+ spanning_tree: Optional[Graph] = None,
+ spanning_tree_fn: Callable = random_spanning_tree,
+ balance_edge_fn: Callable = find_balanced_edge_cuts_memoization,
+ one_sided_cut: bool = False,
+ choice: Callable = random.choice,
+ max_attempts: Optional[int] = 100000,
+) -> Union[Tuple[int, Set[Any]], None]:
+ """
+ This is like :func:`bipartition_tree` except it chooses a random balanced
+ cut, rather than the first cut it finds.
+
+ This function finds a balanced 2 partition of a graph by drawing a
+ spanning tree and finding an edge to cut that leaves at most an epsilon
+ imbalance between the populations of the parts. If a root fails, new roots
+ are tried until node_repeats in which case a new tree is drawn.
+
+ Builds up a connected subgraph with a connected complement whose population
+ is ``epsilon * pop_target`` away from ``pop_target``.
+
+ :param graph: The graph to partition.
+ :type graph: Graph
+ :param pop_col: The node attribute holding the population of each node.
+ :type pop_col: str
+ :param pop_target: The target population for the returned subset of nodes.
+ :type pop_target: Union[int, float]
+ :param epsilon: The allowable deviation from ``pop_target`` (as a percentage of
+ ``pop_target``) for the subgraph's population.
+ :type epsilon: float
+ :param node_repeats: A parameter for the algorithm: how many different choices
+ of root to use before drawing a new spanning tree. Defaults to 1.
+ :type node_repeats: int
+ :param repeat_until_valid: Determines whether to keep drawing spanning trees
+ until a tree with a balanced cut is found. If `True`, a result will
+ always be returned; if `False`, `None` will be returned if a valid spanning
+ tree is not found on the first try. Defaults to True.
+ :type repeat_until_valid: bool, optional
+ :param spanning_tree: The spanning tree for the algorithm to use (used when the
+ algorithm chooses a new root and for testing). Defaults to None.
+ :type spanning_tree: Optional[Graph], optional
+ :param spanning_tree_fn: The random spanning tree algorithm to use if a spanning
+ tree is not provided. Defaults to :func:`random_spanning_tree`.
+ :type spanning_tree_fn: Callable, optional
+ :param balance_edge_fn: The algorithm used to find balanced cut edges. Defaults to
+ :func:`find_balanced_edge_cuts_memoization`.
+ :type balance_edge_fn: Callable, optional
+ :param one_sided_cut: Passed to the ``balance_edge_fn``. Determines whether or not we are
+ cutting off a single district when partitioning the tree. When
+ set to False, we check if the node we are cutting and the remaining graph
+ are both within epsilon of the ideal population. When set to True, we only
+ check if the node we are cutting is within epsilon of the ideal population.
+ Defaults to False.
+ :type one_sided_cut: bool, optional
+ :param choice: The random choice function. Can be substituted for testing. Defaults
+ to :func:`random.choice`.
+ :type choice: Callable, optional
+ :param max_attempts: The max number of attempts that should be made to bipartition.
+ Defaults to 100000.
+ :type max_attempts: Optional[int], optional
+
+ :returns: A tuple of the number of possible cuts and a subset of nodes of ``graph``
+ (whose induced subgraph is connected), or None if a valid spanning tree
+ is not found.
+ :rtype: Union[Tuple[int, Set[Any]], None]
+ """
+
+ # frm: TODO: Refactoring: Again - semantics should not depend on signatures...
+ if "one_sided_cut" in signature(balance_edge_fn).parameters:
+ balance_edge_fn = partial(balance_edge_fn, one_sided_cut=True)
+
+ possible_cuts = _bipartition_tree_random_all(
+ graph_to_split=graph,
+ pop_col=pop_col,
+ pop_target=pop_target,
+ epsilon=epsilon,
+ node_repeats=node_repeats,
+ repeat_until_valid=repeat_until_valid,
+ spanning_tree=spanning_tree,
+ spanning_tree_fn=spanning_tree_fn,
+ balance_edge_fn=balance_edge_fn,
+ choice=choice,
+ max_attempts=max_attempts,
+ )
+ if possible_cuts:
+ chosen_cut = choice(possible_cuts)
+ num_cuts = len(possible_cuts)
+ parent_nodes = graph.translate_subgraph_node_ids_for_set_of_nodes(chosen_cut.subset)
+ return num_cuts, frozenset(parent_nodes) # frm: Not sure if important that it be frozenset
+ else:
+ return None
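+
+# frm: e.g. (illustrative usage, assuming a subgraph and a "TOTPOP" column):
+#
+#     result = bipartition_tree_random_with_num_cuts(
+#         subgraph, pop_col="TOTPOP", pop_target=1000, epsilon=0.05
+#     )
+#     if result is not None:
+#         num_cuts, district_nodes = result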
+
+
+#######################
+# frm TODO: Testing: Check to make sure there is a test for this...
def bipartition_tree_random(
- graph: nx.Graph,
+ subgraph_to_split: Graph,
pop_col: str,
pop_target: Union[int, float],
epsilon: float,
node_repeats: int = 1,
repeat_until_valid: bool = True,
- spanning_tree: Optional[nx.Graph] = None,
+ spanning_tree: Optional[Graph] = None,
spanning_tree_fn: Callable = random_spanning_tree,
balance_edge_fn: Callable = find_balanced_edge_cuts_memoization,
one_sided_cut: bool = False,
@@ -822,7 +1505,7 @@ def bipartition_tree_random(
is ``epsilon * pop_target`` away from ``pop_target``.
:param graph: The graph to partition.
- :type graph: nx.Graph
+ :type graph: Graph
:param pop_col: The node attribute holding the population of each node.
:type pop_col: str
:param pop_target: The target population for the returned subset of nodes.
@@ -840,7 +1523,7 @@ def bipartition_tree_random(
:type repeat_until_valid: bool, optional
:param spanning_tree: The spanning tree for the algorithm to use (used when the
algorithm chooses a new root and for testing). Defaults to None.
- :type spanning_tree: Optional[nx.Graph], optional
+ :type spanning_tree: Optional[Graph], optional
:param spanning_tree_fn: The random spanning tree algorithm to use if a spanning
tree is not provided. Defaults to :func:`random_spanning_tree`.
:type spanning_tree_fn: Callable, optional
@@ -865,11 +1548,24 @@ def bipartition_tree_random(
valid spanning tree is not found.
:rtype: Union[Set[Any], None]
"""
+
+ # frm: TODO: Refactoring: Again - semantics should not depend on signatures...
+ #
+ # This is odd - there are two balance_edge_functions defined in tree.py but
+ # both of them have a formal parameter with the name "one_sided_cut", so this
+ # code is not picking one of them. Perhaps there was an earlier version of
+ # the code where it allowed functions that did not support "one_sided_cut".
+ # In any event, it looks like this if-stmt is a no-op as far as the current
+ # codebase is concerned...
+ #
+ # Even odder - there is a formal parameter, one_sided_cut, which is never
+ # used...
+
if "one_sided_cut" in signature(balance_edge_fn).parameters:
balance_edge_fn = partial(balance_edge_fn, one_sided_cut=True)
possible_cuts = _bipartition_tree_random_all(
- graph=graph,
+ graph_to_split=subgraph_to_split,
pop_col=pop_col,
pop_target=pop_target,
epsilon=epsilon,
@@ -882,11 +1578,20 @@ def bipartition_tree_random(
max_attempts=max_attempts,
)
if possible_cuts:
- return choice(possible_cuts).subset
+ chosen_cut = choice(possible_cuts)
+ translated_nodes = subgraph_to_split.translate_subgraph_node_ids_for_set_of_nodes(
+ chosen_cut.subset
+ )
+ return frozenset(translated_nodes) # frm: Not sure if important that it be frozenset
+
+
+# frm: Note that this routine is only used in recom() (in tree_proposals.py),
+# but maybe it is intended to be used externally as well...
def epsilon_tree_bipartition(
- graph: nx.Graph,
+ subgraph_to_split: Graph,
parts: Sequence,
pop_target: Union[float, int],
pop_col: str,
@@ -899,7 +1604,7 @@ def epsilon_tree_bipartition(
two parts of population ``pop_target`` (within ``epsilon``).
:param graph: The graph to partition into two :math:`\varepsilon`-balanced parts.
- :type graph: nx.Graph
+ :type graph: Graph
:param parts: Iterable of part (district) labels (like ``[0,1,2]`` or ``range(4)``).
:type parts: Sequence
:param pop_target: Target population for each part of the partition.
@@ -926,14 +1631,14 @@ def epsilon_tree_bipartition(
)
flips = {}
- remaining_nodes = graph.node_indices
+ remaining_nodes = subgraph_to_split.node_indices
lb_pop = pop_target * (1 - epsilon)
ub_pop = pop_target * (1 + epsilon)
check_pop = lambda x: lb_pop <= x <= ub_pop
nodes = method(
- graph.subgraph(remaining_nodes),
+ subgraph_to_split.subgraph(remaining_nodes),
pop_col=pop_col,
pop_target=pop_target,
epsilon=epsilon,
@@ -944,10 +1649,15 @@ def epsilon_tree_bipartition(
if nodes is None:
raise BalanceError()
+ # Calculate the total population for the two districts based on the
+ # results of the "method()" partitioning.
part_pop = 0
for node in nodes:
+ # frm: ???: The code above has already confirmed that len(parts) is 2
+ # so why use negative index values - why not just use
+ # parts[0] and parts[1]?
flips[node] = parts[-2]
- part_pop += graph.nodes[node][pop_col]
+ part_pop += subgraph_to_split.node_data(node)[pop_col]
if not check_pop(part_pop):
raise PopulationBalanceError()
@@ -958,18 +1668,23 @@ def epsilon_tree_bipartition(
part_pop = 0
for node in remaining_nodes:
flips[node] = parts[-1]
- part_pop += graph.nodes[node][pop_col]
+ part_pop += subgraph_to_split.node_data(node)[pop_col]
if not check_pop(part_pop):
raise PopulationBalanceError()
- return flips
+ # translate subgraph node_ids back into node_ids in parent graph
+ translated_flips = subgraph_to_split.translate_subgraph_node_ids_for_flips(flips)
+
+ return translated_flips
-# TODO: Move these recursive partition functions to their own module. They are not
-# central to the operation of the recom function despite being tree methods.
+# frm: TODO: Refactoring: Move these recursive partition functions to their own module. They
+# are not central to the operation of the recom function despite being tree methods.
+# frm: defined here but only used in partition.py
+# But maybe this is intended to be used externally...
def recursive_tree_part(
- graph: nx.Graph,
+ graph: Graph,
parts: Sequence,
pop_target: Union[float, int],
pop_col: str,
@@ -983,7 +1698,7 @@ def recursive_tree_part(
generate initial seed plans or to implement ReCom-like "merge walk" proposals.
:param graph: The graph to partition into ``len(parts)`` :math:`\varepsilon`-balanced parts.
- :type graph: nx.Graph
+ :type graph: Graph
:param parts: Iterable of part (district) labels (like ``[0,1,2]`` or ``range(4)``).
:type parts: Sequence
:param pop_target: Target population for each part of the partition.
@@ -1018,13 +1733,23 @@ def recursive_tree_part(
ub_pop = pop_target * (1 + epsilon)
check_pop = lambda x: lb_pop <= x <= ub_pop
+ # frm: Notes to self: The code in the for-loop creates n-2 districts (where n is
+ # the number of partitions desired) by calling the "method"
+ # function, whose job it is to produce a connected set of
+ # nodes that has the desired population target.
+ #
+ # Note that it sets one_sided_cut=True which tells the
+ # "method" function that it is NOT bisecting the graph
+ # but is rather supposed to just find one connected
+ # set of nodes of the correct population size.
+
for part in parts[:-2]:
min_pop = max(pop_target * (1 - epsilon), pop_target * (1 - epsilon) - debt)
max_pop = min(pop_target * (1 + epsilon), pop_target * (1 + epsilon) - debt)
new_pop_target = (min_pop + max_pop) / 2
try:
- nodes = method(
+ node_ids = method(
graph.subgraph(remaining_nodes),
pop_col=pop_col,
pop_target=new_pop_target,
@@ -1035,23 +1760,27 @@ def recursive_tree_part(
except Exception:
raise
- if nodes is None:
+ if node_ids is None:
raise BalanceError()
part_pop = 0
- for node in nodes:
+ for node in node_ids:
flips[node] = part
- part_pop += graph.nodes[node][pop_col]
+ part_pop += graph.node_data(node)[pop_col]
if not check_pop(part_pop):
raise PopulationBalanceError()
debt += part_pop - pop_target
- remaining_nodes -= nodes
+ remaining_nodes -= node_ids
# After making n-2 districts, we need to make sure that the last
# two districts are both balanced.
- nodes = method(
+
+ # frm: For the last call to "method", set one_sided_cut=False to
+ # request that "method" create two equal sized districts
+ # with the given population goal by bisecting the graph.
+ node_ids = method(
graph.subgraph(remaining_nodes),
pop_col=pop_col,
pop_target=pop_target,
@@ -1060,24 +1789,31 @@ def recursive_tree_part(
one_sided_cut=False,
)
- if nodes is None:
+ if node_ids is None:
raise BalanceError()
part_pop = 0
- for node in nodes:
- flips[node] = parts[-2]
- part_pop += graph.nodes[node][pop_col]
+ for node_id in node_ids:
+ flips[node_id] = parts[-2]
+ # frm: this code fragment, graph.node_data(node_id)[pop_col], is used
+ # many times and is a candidate for being wrapped in
+ # a function with a meaningful name, perhaps:
+ # get_population_for_node(node_id, pop_col) - see the sketch below.
+ # Code gurus would call this code-bloat,
+ # but it really helps a new code reviewer understand
+ # WTF is going on...
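+ # frm: e.g. (hypothetical helper, not part of this change):
+ #
+ #     def get_population_for_node(graph, node_id, pop_col):
+ #         """Return the population stored on a single node."""
+ #         return graph.node_data(node_id)[pop_col]
+ #
+ # The line below would then read:
+ #     part_pop += get_population_for_node(graph, node_id, pop_col)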
+ part_pop += graph.node_data(node_id)[pop_col]
if not check_pop(part_pop):
raise PopulationBalanceError()
- remaining_nodes -= nodes
+ remaining_nodes -= node_ids
# All of the remaining nodes go in the last part
part_pop = 0
for node in remaining_nodes:
flips[node] = parts[-1]
- part_pop += graph.nodes[node][pop_col]
+ part_pop += graph.node_data(node)[pop_col]
if not check_pop(part_pop):
raise PopulationBalanceError()
@@ -1085,8 +1821,9 @@ def recursive_tree_part(
return flips
-def get_seed_chunks(
- graph: nx.Graph,
+# frm: only used in this file, so I changed the name to have a leading underscore
+def _get_seed_chunks(
+ graph: Graph,
num_chunks: int,
num_dists: int,
pop_target: Union[int, float],
@@ -1100,7 +1837,7 @@ def get_seed_chunks(
balanced within new_epsilon <= ``epsilon`` of a balanced target population.
:param graph: The graph
- :type graph: nx.Graph
+ :type graph: Graph
:param num_chunks: The number of chunks to partition the graph into
:type num_chunks: int
:param num_dists: The number of districts
@@ -1122,22 +1859,49 @@ def get_seed_chunks(
:returns: New assignments for the nodes of ``graph``.
:rtype: List[List[int]]
"""
+
+ # frm: TODO: Refactoring: Change the name of num_chunks_left to instead be
+ # num_districts_per_chunk.
+ # frm: ???: It is not clear to me when num_chunks will not evenly divide num_dists. In
+ # the only place where _get_seed_chunks() is called, it is inside an if-stmt
+ # branch that validates that num_chunks evenly divides num_dists...
+ #
num_chunks_left = num_dists // num_chunks
+
+ # frm: TODO: Refactoring: Change the name of parts below to be something / anything else.
+ # Normally parts refers to districts, but here it is just a way to keep track of
+ # sets of nodes for chunks. Yes - they eventually become districts when this code gets
+ # to the base cases, but I found it confusing at this level...
+ #
parts = range(num_chunks)
+ # frm: ???: I think that new_epsilon is the epsilon to use for each district, in which
+ # case the epsilon passed in would be for the HERE...
new_epsilon = epsilon / (num_chunks_left * num_chunks)
if num_chunks_left == 1:
new_epsilon = epsilon
chunk_pop = 0
for node in graph.node_indices:
- chunk_pop += graph.nodes[node][pop_col]
+ chunk_pop += graph.node_data(node)[pop_col]
+ # frm: TODO: Refactoring: See if there is a better way to structure this instead of a while
+ # True loop...
while True:
epsilon = abs(epsilon)
flips = {}
- remaining_nodes = set(graph.nodes)
-
+ remaining_nodes = graph.node_indices
+
+ # frm: ??? What is the distinction between num_chunks and num_districts?
+ # I think that a chunk is typically a multiple of districts, so
+ # if we want 15 districts we might only ask for 5 chunks. Stated
+ # differently, a chunk will always have at least enough nodes
+ # for a given number of districts. As the chunk size gets
+ # smaller, the number of nodes more closely matches what
+ # is needed for a set number of districts.
+
+ # frm: Note: This just scales epsilon by the number of districts for each chunk
+ # so we can get chunks with the appropriate population sizes...
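+
+ # frm: e.g. (illustrative numbers): with num_dists = 6 and num_chunks = 3,
+ # num_chunks_left = 2 and new_epsilon = epsilon / 6, so each chunk
+ # targets a population of 2 * pop_target, within
+ # 2 * pop_target * new_epsilon.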
min_pop = pop_target * (1 - new_epsilon) * num_chunks_left
max_pop = pop_target * (1 + new_epsilon) * num_chunks_left
@@ -1146,6 +1910,26 @@ def get_seed_chunks(
diff = min(max_pop - chunk_pop_target, chunk_pop_target - min_pop)
new_new_epsilon = diff / chunk_pop_target
+ # frm: Note: This code is clever... It loops through all of the
+ # parts (districts) except the last, and on each
+ # iteration, it finds nodes for the given part.
+ # Each time through the loop it assigns the
+ # unassigned nodes to the last part, but
+ # most of this gets overwritten by the next
+ # iteration, so that at the end the only nodes
+ # still assigned to the last part are the ones
+ # that had not been previously assigned.
+ #
+ # It works, but is a little too clever for me.
+ #
+ # I would just have assigned all nodes to
+ # the last part before entering the loop
+ # with a comment saying that by end of loop
+ # the nodes not assigned in the loop will
+ # default to the last part.
+ #
+
+ # Assign all nodes to one of the parts
for i in range(len(parts[:-1])):
part = parts[i]
@@ -1168,13 +1952,21 @@ def get_seed_chunks(
for node in remaining_nodes:
flips[node] = parts[-1]
+ # frm: ???: Look at remaining_nodes to see if we are done
part_pop = 0
+ # frm: ???: Compute population total for remaining nodes.
for node in remaining_nodes:
- part_pop += graph.nodes[node][pop_col]
+ part_pop += graph.node_data(node)[pop_col]
+ # frm: ???: Compute what the population total would be for each district in chunk
part_pop_as_dist = part_pop / num_chunks_left
fake_epsilon = epsilon
+ # frm: ???: If the chunk is for more than one district, divide epsilon by two
if num_chunks_left != 1:
fake_epsilon = epsilon / 2
+ # frm: ???: Calculate max and min populations on a district level.
+ # This will just be based on epsilon if we only want one district from the
+ # chunk, but it will be based on half of epsilon if we want more than one
+ # district from the chunk. This is odd - why wouldn't we use an epsilon
+ # scaled by the actual number of districts in the chunk, instead of just halving it?
min_pop_as_dist = pop_target * (1 - fake_epsilon)
max_pop_as_dist = pop_target * (1 + fake_epsilon)
@@ -1194,9 +1986,11 @@ def get_seed_chunks(
return list(chunks.values())
+# frm: only used in this file
+# But maybe this is intended to be used externally...
def get_max_prime_factor_less_than(n: int, ceil: int) -> Optional[int]:
"""
- Helper function for recursive_seed_part_inner. Returns the largest prime factor of ``n``
+ Helper function for _recursive_seed_part_inner. Returns the largest prime factor of ``n``
less than ``ceil``, or None if all are greater than ceil.
:param n: The number to find the largest prime factor for.
@@ -1230,8 +2024,8 @@ def get_max_prime_factor_less_than(n: int, ceil: int) -> Optional[int]:
return largest_factor
-def recursive_seed_part_inner(
- graph: nx.Graph,
+def _recursive_seed_part_inner(
+ graph: Graph,
num_dists: int,
pop_target: Union[float, int],
pop_col: str,
@@ -1245,6 +2039,16 @@ def recursive_seed_part_inner(
Inner function for recursive_seed_part.
Returns a partition with ``num_dists`` districts balanced within ``epsilon`` of
``pop_target``.
+
+ frm: TODO: Documentation: Correct the above statement that this function returns a
+ partition. In fact, it returns a list of sets of nodes, which is
+ conceptually equivalent to a partition, but is not a Partition object.
+ Each set of nodes constitutes a district, but the district does not
+ have an ID, and there is nothing that associates these nodes
+ with a specific graph - that is implicit, depending on the graph
+ object passed in, so the caller is responsible for knowing that
+ the returned list of sets belongs to the graph passed in...
+
Splits graph into num_chunks chunks, and then recursively splits each chunk into
``num_dists``/num_chunks chunks.
The number num_chunks of chunks is chosen based on ``n`` and ``ceil`` as follows:
@@ -1259,8 +2063,15 @@ def recursive_seed_part_inner(
this function bites off a single district from the graph and recursively partitions
the remaining graph into ``num_dists - 1`` districts.
+ frm: ???: OK, but why is the logic above for num_chunks the correct number? Is there
+ a mathematical reason for it? I assume so, but that explanation is missing...
+
+ I presume that the reason is that something in the code that finds a
+ district scales exponentially, so it makes sense to divide and conquer.
+ Even so, why this particular strategy for divide and conquer?
+
:param graph: The underlying graph structure.
- :type graph: nx.Graph
+ :type graph: Graph
:param num_dists: number of districts to partition the graph into
:type num_dists: int
:param pop_target: Target population for each part of the partition
@@ -1292,6 +2103,18 @@ def recursive_seed_part_inner(
:rtype: List of sets, each set is a district
"""
+ """
+ frm: This code is quite nice once you grok it.
+
+ The goal is to find the given number of districts - but to do it in an
+ efficient way - meaning with smaller graphs. So conceptually, you want
+ to
+ HERE
+
+ There are two base cases when the number of districts still to be found are
+ either 1 or
+
+ """
# Chooses num_chunks
if n is None:
if ceil is None:
@@ -1301,17 +2124,28 @@ def recursive_seed_part_inner(
else:
raise ValueError("ceil must be None or at least 2")
elif n > 1:
+ # frm: Note: This is not guaranteed to evenly divide num_dists
num_chunks = n
else:
raise ValueError("n must be None or a positive integer")
# base case
if num_dists == 1:
- return [set(graph.nodes)]
+ # Just return an assignment with all of the nodes in the graph
+ # Translate the node_ids into parent_node_ids
+ translated_set_of_nodes = graph.translate_subgraph_node_ids_for_set_of_nodes(
+ graph.node_indices
+ )
+ translated_assignment = []
+ translated_assignment.append(translated_set_of_nodes)
+ return translated_assignment
+
+ # frm: In the case when there are exactly 2 districts, split the graph by setting
+ # one_sided_cut to be False.
if num_dists == 2:
nodes = method(
- graph,
+ graph.subgraph(graph.node_indices), # needs to be a subgraph
pop_col=pop_col,
pop_target=pop_target,
epsilon=epsilon,
@@ -1319,11 +2153,38 @@ def recursive_seed_part_inner(
one_sided_cut=False,
)
- return [set(nodes), set(graph.nodes) - set(nodes)]
+ # frm: Note to Self: the name "one_sided_cut" seems unnecessarily opaque. What it really
+ # means is whether to split the graph into two equal districts or
+ # whether to just find one district from a larger graph. When we
+ # clean up this code, consider changing the name of this parameter
+ # to something like: find_two_equal_sized_districts...
+ #
+ # Consider creating a wrapper function which has the better
+ # name that delegates to a private method to do the work.
+
+ nodes_for_one_district = set(nodes)
+ nodes_for_the_other_district = set(graph.node_indices) - nodes_for_one_district
+
+ # Translate the subgraph node_ids into parent_node_ids
+ translated_set_1 = graph.translate_subgraph_node_ids_for_set_of_nodes(
+ nodes_for_one_district
+ )
+ translated_set_2 = graph.translate_subgraph_node_ids_for_set_of_nodes(
+ nodes_for_the_other_district
+ )
+
+ return [translated_set_1, translated_set_2]
# bite off a district and recurse into the remaining subgraph
+ # frm: Note: In the case when num_chunks does not evenly divide num_dists,
+ # just find one district, remove those nodes from
+ # the unassigned nodes and try again with num_dists
+ # set to be one less. Stated differently, reduce
+ # number of desired districts until you get to
+ # one that is evenly divided by num_chunks and then
+ # do chunk stuff...
elif num_chunks is None or num_dists % num_chunks != 0:
- remaining_nodes = set(graph.nodes)
+ remaining_nodes = graph.node_indices
nodes = method(
graph.subgraph(remaining_nodes),
pop_col=pop_col,
@@ -1333,7 +2194,9 @@ def recursive_seed_part_inner(
one_sided_cut=True,
)
remaining_nodes -= nodes
- assignment = [nodes] + recursive_seed_part_inner(
+ # frm: Create a list with the set of nodes returned by method() and then recurse
+ # to get the rest of the sets of nodes for remaining districts.
+ assignment = [nodes] + _recursive_seed_part_inner(
graph.subgraph(remaining_nodes),
num_dists - 1,
pop_target,
@@ -1345,9 +2208,10 @@ def recursive_seed_part_inner(
)
# split graph into num_chunks chunks, and recurse into each chunk
+ # frm: TODO: Documentation: Add documentation for why a subgraph is needed in the call below
elif num_dists % num_chunks == 0:
- chunks = get_seed_chunks(
- graph,
+ chunks = _get_seed_chunks(
+ graph.subgraph(graph.node_indices), # needs to be a subgraph
num_chunks,
num_dists,
pop_target,
@@ -1358,9 +2222,9 @@ def recursive_seed_part_inner(
assignment = []
for chunk in chunks:
- chunk_assignment = recursive_seed_part_inner(
+ chunk_assignment = _recursive_seed_part_inner(
graph.subgraph(chunk),
- num_dists // num_chunks,
+ num_dists // num_chunks, # new target number of districts
pop_target,
pop_col,
epsilon,
@@ -1369,12 +2233,27 @@ def recursive_seed_part_inner(
ceil=ceil,
)
assignment += chunk_assignment
+ else:
+ # frm: From the logic above, this should never happen, but if it did
+ # because of a future edit (bug), at least this will catch it
+ # early before really bizarre things happen...
+ raise Exception("_recursive_seed_part_inner(): Should never happen...")
- return assignment
+ # The assignment object that has been created needs to have its
+ # node_ids translated into parent_node_ids
+ translated_assignment = []
+ for set_of_nodes in assignment:
+ translated_set_of_nodes = graph.translate_subgraph_node_ids_for_set_of_nodes(set_of_nodes)
+ translated_assignment.append(translated_set_of_nodes)
+ return translated_assignment
+
+
+# frm TODO: Refactoring: This routine is never called - not in this file and not in any other
+# GerryChain file. Is it intended to be used by end-users? And if so, for what purpose?
def recursive_seed_part(
- graph: nx.Graph,
+ graph: Graph,
parts: Sequence,
pop_target: Union[float, int],
pop_col: str,
@@ -1386,10 +2265,10 @@ def recursive_seed_part(
) -> Dict:
"""
Returns a partition with ``num_dists`` districts balanced within ``epsilon`` of
- ``pop_target`` by recursively splitting graph using recursive_seed_part_inner.
+ ``pop_target`` by recursively splitting graph using _recursive_seed_part_inner.
:param graph: The graph
- :type graph: nx.Graph
+ :type graph: Graph
:param parts: Iterable of part labels (like ``[0,1,2]`` or ``range(4)``
:type parts: Sequence
:param pop_target: Target population for each part of the partition
@@ -1420,9 +2299,24 @@ def recursive_seed_part(
:returns: New assignments for the nodes of ``graph``.
:rtype: dict
"""
+
+ # frm: Note: It is not strictly necessary to use a subgraph in the call to
+ # _recursive_seed_part_inner() below, because the top-level graph has
+ # a _node_id_to_parent_node_id_map that just maps node_ids to themselves.
+ # However, it seemed good practice to ALWAYS pass a subgraph to routines
+ # that are intended to deal with subgraphs, even when not strictly
+ # necessary. Just one less cognitive load to have to worry about.
+ #
+ # This probably means that the identity _node_id_to_parent_node_id_map for
+ # top-level graphs will never be used, but I still think that it makes sense to
+ # retain it - again, for consistency: every graph knows how to translate to
+ # parent_node_ids even if it is a top-level graph.
+ #
+ # In short - an argument based on invariants being a good thing...
+ #
flips = {}
- assignment = recursive_seed_part_inner(
- graph,
+ assignment = _recursive_seed_part_inner(
+ graph.subgraph(graph.node_indices),
len(parts),
pop_target,
pop_col,
diff --git a/gerrychain/updaters/__init__.py b/gerrychain/updaters/__init__.py
index 2b25bb10..34e206cb 100644
--- a/gerrychain/updaters/__init__.py
+++ b/gerrychain/updaters/__init__.py
@@ -10,8 +10,8 @@
from .cut_edges import cut_edges, cut_edges_by_part
from .election import Election
from .flows import compute_edge_flows, flows_from_changes
-from .tally import DataTally, Tally
from .spanning_trees import num_spanning_trees
+from .tally import DataTally, Tally
__all__ = [
"flows_from_changes",
diff --git a/gerrychain/updaters/compactness.py b/gerrychain/updaters/compactness.py
index 7b42e201..8f87bfba 100644
--- a/gerrychain/updaters/compactness.py
+++ b/gerrychain/updaters/compactness.py
@@ -1,8 +1,8 @@
import collections
+from typing import Dict, Set
-from .flows import on_flow
from .cut_edges import on_edge_flow
-from typing import Dict, Set
+from .flows import on_flow
def boundary_nodes(partition, alias: str = "boundary_nodes") -> Set:
@@ -16,13 +16,20 @@ def boundary_nodes(partition, alias: str = "boundary_nodes") -> Set:
:returns: The set of nodes in the partition that are on the boundary.
:rtype: Set
"""
+
+ # Note that the "alias" parameter is used as the attribute name
+ # on the partition - using this "alias" you can retrieve the
+ # data stored by an updater that uses this routine...
+
if partition.parent:
return partition.parent[alias]
- return {
- node
- for node in partition.graph.nodes
- if partition.graph.nodes[node]["boundary_node"]
- }
+ else:
+ result = {
+ node_id
+ for node_id in partition.graph.node_indices
+ if partition.graph.node_data(node_id)["boundary_node"]
+ }
+ return result
def initialize_exterior_boundaries_as_a_set(partition) -> Dict[int, Set]:
@@ -37,13 +44,12 @@ def initialize_exterior_boundaries_as_a_set(partition) -> Dict[int, Set]:
part_boundaries = collections.defaultdict(set)
for node in partition["boundary_nodes"]:
part_boundaries[partition.assignment.mapping[node]].add(node)
+
return part_boundaries
@on_flow(initialize_exterior_boundaries_as_a_set, alias="exterior_boundaries_as_a_set")
-def exterior_boundaries_as_a_set(
- partition, previous: Set, inflow: Set, outflow: Set
-) -> Set:
+def exterior_boundaries_as_a_set(partition, previous: Set, inflow: Set, outflow: Set) -> Set:
"""
Updater function that responds to the flow of nodes between different partitions.
@@ -63,6 +69,16 @@ def exterior_boundaries_as_a_set(
partition.
:rtype: Set
"""
+ # Compute the new set of boundary nodes for the partition.
+ #
+ # The term (inflow & graph_boundary) picks out the incoming nodes that are boundary nodes.
+ #
+ # The term (previous | (inflow & graph_boundary)) adds those new boundary nodes to the
+ # set of previous boundary nodes.
+ #
+ # Then all you need to do is subtract the nodes in the outflow to remove any of those
+ # that happen to be boundary nodes...
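+ #
+ # Tiny worked example (illustrative):
+ # previous = {1, 2}, inflow = {3, 4}, outflow = {2}, graph_boundary = {1, 2, 3}
+ # (previous | (inflow & graph_boundary)) - outflow
+ # = ({1, 2} | {3}) - {2} = {1, 3}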
+
graph_boundary = partition["boundary_nodes"]
return (previous | (inflow & graph_boundary)) - outflow
@@ -80,7 +96,7 @@ def initialize_exterior_boundaries(partition) -> Dict[int, float]:
boundaries = collections.defaultdict(lambda: 0)
for node in graph_boundary:
part = partition.assignment.mapping[node]
- boundaries[part] += partition.graph.nodes[node]["boundary_perim"]
+ boundaries[part] += partition.graph.node_data(node)["boundary_perim"]
return boundaries
@@ -107,12 +123,10 @@ def exterior_boundaries(partition, previous: Set, inflow: Set, outflow: Set) ->
"""
graph_boundary = partition["boundary_nodes"]
added_perimeter = sum(
- partition.graph.nodes[node]["boundary_perim"]
- for node in inflow & graph_boundary
+ partition.graph.node_data(node)["boundary_perim"] for node in inflow & graph_boundary
)
removed_perimeter = sum(
- partition.graph.nodes[node]["boundary_perim"]
- for node in outflow & graph_boundary
+ partition.graph.node_data(node)["boundary_perim"] for node in outflow & graph_boundary
)
return previous + added_perimeter - removed_perimeter
@@ -126,19 +140,37 @@ def initialize_interior_boundaries(partition):
perimeter the given part shares with other parts.
:rtype: Dict[int, float]
"""
- return {
- part: sum(
- partition.graph.edges[edge]["shared_perim"]
+
+ # RustworkX Note:
+ #
+ # The old NX code did not distinguish between edges and edge_ids - they were one
+ # and the same. However, in RX an edge is a tuple and an edge_id is an integer.
+ # The edges stored in partition["cut_edges_by_part"] are edges (tuples), so
+ # we need to get the edge_id for each edge in order to access the data for the edge.
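+
+ # e.g. (illustrative): for a single edge tuple,
+ #
+ #     edge_id = partition.graph.get_edge_id_from_edge((3, 7))
+ #     shared = partition.graph.edge_data(edge_id)["shared_perim"]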
+
+ # Get edge_ids for each edge (tuple)
+ edge_ids_for_part = {
+ part: [
+ partition.graph.get_edge_id_from_edge(edge)
for edge in partition["cut_edges_by_part"][part]
+ ]
+ for part in partition.parts
+ }
+
+ # Compute length of the shared perimeter of each part
+ shared_perimeters_for_part = {
+ part: sum(
+ partition.graph.edge_data(edge_id)["shared_perim"]
+ for edge_id in edge_ids_for_part[part]
)
for part in partition.parts
}
+ return shared_perimeters_for_part
+
@on_edge_flow(initialize_interior_boundaries, alias="interior_boundaries")
-def interior_boundaries(
- partition, previous: Set, new_edges: Set, old_edges: Set
-) -> Dict:
+def interior_boundaries(partition, previous: Set, new_edges: Set, old_edges: Set) -> Dict:
"""
Updater function that responds to the flow of nodes between different partitions.
@@ -159,11 +191,14 @@ def interior_boundaries(
boundary of that part.
:rtype: Dict
"""
+
added_perimeter = sum(
- partition.graph.edges[edge]["shared_perim"] for edge in new_edges
+ partition.graph.edge_data(partition.graph.get_edge_id_from_edge(edge))["shared_perim"]
+ for edge in new_edges
)
removed_perimeter = sum(
- partition.graph.edges[edge]["shared_perim"] for edge in old_edges
+ partition.graph.edge_data(partition.graph.get_edge_id_from_edge(edge))["shared_perim"]
+ for edge in old_edges
)
return previous + added_perimeter - removed_perimeter
@@ -177,6 +212,7 @@ def flips(partition) -> Dict:
given partition.
:rtype: Dict
"""
+ # frm: ???: Does anyone ever use this? It seems kind of useless...
return partition.flips
@@ -184,7 +220,7 @@ def perimeter_of_part(partition, part: int) -> float:
"""
Totals up the perimeter of the part in the partition.
- .. Warning::
+ frm: TODO: Refactoring: Add code to enforce this warning...
+
+ .. Warning::
Requires that 'boundary_perim' be a node attribute, 'shared_perim' be an edge
attribute, 'cut_edges' be an updater, and 'exterior_boundaries' be an updater.
diff --git a/gerrychain/updaters/county_splits.py b/gerrychain/updaters/county_splits.py
index fad28f4c..a0503e5d 100644
--- a/gerrychain/updaters/county_splits.py
+++ b/gerrychain/updaters/county_splits.py
@@ -2,7 +2,6 @@
from enum import Enum
from typing import Callable, Dict
-
CountyInfo = collections.namedtuple("CountyInfo", "split nodes contains")
"""
A named tuple to store county split information.
@@ -79,21 +78,29 @@ def compute_county_splits(
# Create the initial county data containers.
if not partition.parent:
+
county_dict = dict()
- for node in partition.graph.node_indices:
- county = partition.graph.lookup(node, county_field)
+ for node_id in partition.graph.node_indices:
+
+ # First get the current status of the county's information
+ county = partition.graph.node_data(node_id)[county_field]
if county in county_dict:
split, nodes, seen = county_dict[county]
else:
split, nodes, seen = CountySplit.NOT_SPLIT, [], set()
- nodes.append(node)
- seen.update(set([partition.assignment.mapping[node]]))
+ # Now update "nodes" and "seen" with this node_id and the part (district) from
+ # the partition's assignment.
+ nodes.append(node_id)
+ seen.update(set([partition.assignment.mapping[node_id]]))
+ # Lastly, if we have "seen" more than one part (district), then the county is split
+ # across parts.
if len(seen) > 1:
split = CountySplit.OLD_SPLIT
+ # update the county_dict with new information
county_dict[county] = CountyInfo(split, nodes, seen)
return county_dict
@@ -102,7 +109,7 @@ def compute_county_splits(
parent = partition.parent
for county, county_info in parent[partition_field].items():
- seen = set(partition.assignment.mapping[node] for node in county_info.nodes)
+ seen = set(partition.assignment.mapping[node_id] for node_id in county_info.nodes)
split = CountySplit.NOT_SPLIT
@@ -135,9 +142,7 @@ def _get_splits(partition):
nonlocal reg_attr_lst
if "cut_edges" not in partition.updaters:
raise ValueError("The cut_edges updater must be attached to the partition")
- return {
- reg_attr: total_reg_splits(partition, reg_attr) for reg_attr in reg_attr_lst
- }
+ return {reg_attr: total_reg_splits(partition, reg_attr) for reg_attr in reg_attr_lst}
return _get_splits
@@ -145,17 +150,17 @@ def _get_splits(partition):
def total_reg_splits(partition, reg_attr):
"""Returns the total number of times that reg_attr is split in the partition."""
all_region_names = set(
- partition.graph.nodes[node][reg_attr] for node in partition.graph.nodes
+ partition.graph.node_data(node_id)[reg_attr] for node_id in partition.graph.node_indices
)
split = {name: 0 for name in all_region_names}
# Require that the cut_edges updater is attached to the partition
for node1, node2 in partition["cut_edges"]:
if (
partition.assignment[node1] != partition.assignment[node2]
- and partition.graph.nodes[node1][reg_attr]
- == partition.graph.nodes[node2][reg_attr]
+ and partition.graph.node_data(node1)[reg_attr]
+ == partition.graph.node_data(node2)[reg_attr]
):
- split[partition.graph.nodes[node1][reg_attr]] += 1
- split[partition.graph.nodes[node2][reg_attr]] += 1
+ split[partition.graph.node_data(node1)[reg_attr]] += 1
+ split[partition.graph.node_data(node2)[reg_attr]] += 1
return sum(1 for value in split.values() if value > 0)
diff --git a/gerrychain/updaters/cut_edges.py b/gerrychain/updaters/cut_edges.py
index 7fac766e..fe1a2ec3 100644
--- a/gerrychain/updaters/cut_edges.py
+++ b/gerrychain/updaters/cut_edges.py
@@ -1,31 +1,32 @@
import collections
from typing import Dict, List, Set, Tuple
-from .flows import on_edge_flow, neighbor_flips
+from .flows import neighbor_flips, on_edge_flow
-def put_edges_into_parts(edges: List, assignment: Dict) -> Dict:
+
+def _put_edges_into_parts(cut_edges: List, assignment: Dict) -> Dict:
"""
- :param edges: A list of edges in a graph which are to be separated
+ :param cut_edges: A list of cut_edges in a graph which are to be separated
into their respective parts within the partition according to
the given assignment.
- :type edges: List
+ :type cut_edges: List
:param assignment: A dictionary mapping nodes to their respective
parts within the partition.
:type assignment: Dict
- :returns: A dictionary mapping each part of a partition to the set of edges
+ :returns: A dictionary mapping each part of a partition to the set of cut_edges
in that part.
:rtype: Dict
"""
by_part = collections.defaultdict(set)
- for edge in edges:
+ for edge in cut_edges:
# add edge to the sets corresponding to the parts it touches
by_part[assignment.mapping[edge[0]]].add(edge)
by_part[assignment.mapping[edge[1]]].add(edge)
return by_part
-def new_cuts(partition) -> Set[Tuple]:
+def _new_cuts(partition) -> Set[Tuple]:
"""
:param partition: A partition of a Graph
:type partition: :class:`~gerrychain.partition.Partition`
@@ -40,7 +41,7 @@ def new_cuts(partition) -> Set[Tuple]:
}
-def obsolete_cuts(partition) -> Set[Tuple]:
+def _obsolete_cuts(partition) -> Set[Tuple]:
"""
:param partition: A partition of a Graph
:type partition: :class:`~gerrychain.partition.Partition`
@@ -61,22 +62,44 @@ def initialize_cut_edges(partition):
:param partition: A partition of a Graph
:type partition: :class:`~gerrychain.partition.Partition`
+    frm: TODO: Documentation: This description should be updated. Cut_edges are edges that touch
+         two different parts (districts). They are the internal boundaries
+         between parts (districts). This routine finds all of the cut_edges
+         in the graph and then creates a dict that stores all of the cut_edges
+         for each part (district). This dict becomes the value of
+         partition["cut_edges"].
+
+         Peter agreed:
+            Ah, you are correct. It maps parts to cut edges, not just any edges in the
+            partition.
+
:returns: A dictionary mapping each part of a partition to the set of edges
in that part.
:rtype: Dict
"""
- edges = {
+ # Compute the set of edges that are "cut_edges" - that is, edges that go from
+ # one part (district) to another.
+ cut_edges = {
tuple(sorted(edge))
+ # frm: edges vs edge_ids: edges are wanted here (tuples)
for edge in partition.graph.edges
if partition.crosses_parts(edge)
}
- return put_edges_into_parts(edges, partition.assignment)
+ return _put_edges_into_parts(cut_edges, partition.assignment)
@on_edge_flow(initialize_cut_edges, alias="cut_edges_by_part")
def cut_edges_by_part(
partition, previous: Set[Tuple], new_edges: Set[Tuple], old_edges: Set[Tuple]
) -> Set[Tuple]:
+ #
+ # frm TODO: Documentation: Update / expand the documentation for this routine.
+ #
+ # This only operates on cut-edges and not on all of the
+ # edges in a partition. A "cut-edge" is an edge that spans two districts.
+ #
"""
Updater function that responds to the flow of edges between different partitions.
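
As the comments above note, a cut edge is an edge whose endpoints are assigned to different parts (districts). A minimal sketch of the set comprehension used in initialize_cut_edges, with toy data in place of the real Graph/Partition API:

    # edges are node-id pairs; assignment maps node_id -> part.
    edges = [(0, 1), (1, 2), (2, 3)]
    assignment = {0: "A", 1: "A", 2: "B", 3: "B"}

    cut_edges = {tuple(sorted(e)) for e in edges if assignment[e[0]] != assignment[e[1]]}
    print(cut_edges)  # {(1, 2)}
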
@@ -108,13 +131,11 @@ def cut_edges(partition):
if not parent:
return {
- tuple(sorted(edge))
- for edge in partition.graph.edges
- if partition.crosses_parts(edge)
+ tuple(sorted(edge)) for edge in partition.graph.edges if partition.crosses_parts(edge)
}
# Edges that weren't cut, but now are cut
# We sort the tuples to make sure we don't accidentally end
# up with both (4,5) and (5,4) (for example) in it
- new, obsolete = new_cuts(partition), obsolete_cuts(partition)
+ new, obsolete = _new_cuts(partition), _obsolete_cuts(partition)
return (parent["cut_edges"] | new) - obsolete
diff --git a/gerrychain/updaters/election.py b/gerrychain/updaters/election.py
index 2415de42..7ac70aea 100644
--- a/gerrychain/updaters/election.py
+++ b/gerrychain/updaters/election.py
@@ -1,7 +1,8 @@
import math
from typing import Dict, List, Optional, Tuple, Union
-from gerrychain.updaters.tally import DataTally
+
import gerrychain.metrics.partisan as pm
+from gerrychain.updaters.tally import DataTally
class Election:
@@ -48,12 +49,12 @@ class Election:
:type name: str
:ivar parties: A list of the names of the parties in the election.
:type parties: List[str]
- :ivar columns: A list of the columns in the graph's node data that hold
- the vote totals for each party.
- :type columns: List[str]
- :ivar parties_to_columns: A dictionary mapping party names to the columns
- in the graph's node data that hold the vote totals for that party.
- :type parties_to_columns: Dict[str, str]
+    :ivar node_attribute_names: A list of the names of the node attributes in the
+        graph's node data that hold the vote totals for each party.
+    :type node_attribute_names: List[str]
+    :ivar party_names_to_node_attribute_names: A dictionary mapping party names to the
+        names of the node attributes that hold the vote totals for that party.
+    :type party_names_to_node_attribute_names: Dict[str, str]
:ivar tallies: A dictionary mapping party names to :class:`DataTally` objects
that manage the vote totals for that party.
:type tallies: Dict[str, DataTally]
@@ -68,54 +69,117 @@ class Election:
def __init__(
self,
name: str,
- parties_to_columns: Union[Dict, List],
+ party_names_to_node_attribute_names: Union[Dict, List],
alias: Optional[str] = None,
) -> None:
"""
:param name: The name of the election. (e.g. "2008 Presidential")
:type name: str
- :param parties_to_columns: A dictionary matching party names to their
- data columns, either as actual columns (list-like, indexed by nodes)
- or as string keys for the node attributes that hold the party's
- vote totals. Or, a list of strings which will serve as both
- the party names and the node attribute keys.
- :type parties_to_columns: Union[Dict, List]
+        :param party_names_to_node_attribute_names: A mapping from the name of a
+            party to the name of the node attribute that contains the
+            vote totals for that party. This parameter can be either a list or
+            a dict. If a list, then the name of the party and the name of the
+            node attribute are the same; for instance, ["Dem", "Rep"] would
+            indicate that the "Dem" party vote totals are stored in the "Dem"
+            node attribute. If a dict, then each key is a party name and the
+            corresponding value is the name of the node attribute that holds
+            that party's vote totals.
+        :type party_names_to_node_attribute_names: Union[Dict, List]
:param alias: Alias that the election is registered under
in the Partition's dictionary of updaters.
:type alias: Optional[str], optional
"""
+
self.name = name
if alias is None:
alias = name
self.alias = alias
- if isinstance(parties_to_columns, dict):
- self.parties = list(parties_to_columns.keys())
- self.columns = list(parties_to_columns.values())
- self.parties_to_columns = parties_to_columns
- elif isinstance(parties_to_columns, list):
- self.parties = parties_to_columns
- self.columns = parties_to_columns
- self.parties_to_columns = dict(zip(self.parties, self.columns))
+ # Canonicalize "parties", "node_attribute_names", and "party_names_to_node_attribute_names":
+ #
+ # "parties" are the names of the parties for purposes of reporting
+ # "node_attribute_names" are the names of the node attributes storing vote counts
+ # "party_names_to_node_attribute_names" is a mapping from one to the other
+ #
+ if isinstance(party_names_to_node_attribute_names, dict):
+ self.parties = list(party_names_to_node_attribute_names.keys())
+ self.node_attribute_names = list(party_names_to_node_attribute_names.values())
+ self.party_names_to_node_attribute_names = party_names_to_node_attribute_names
+ elif isinstance(party_names_to_node_attribute_names, list):
+ # name of the party and the attribute name containing value is the same
+ self.parties = party_names_to_node_attribute_names
+ self.node_attribute_names = party_names_to_node_attribute_names
+ self.party_names_to_node_attribute_names = dict(
+ zip(self.parties, self.node_attribute_names)
+ )
else:
- raise TypeError("Election expects parties_to_columns to be a dict or list")
+ raise TypeError(
+ "Election expects party_names_to_node_attribute_names to be a dict or list"
+ )
+
+ # frm: TODO: Documentation: Migration: Using node_ids to vote tally maps...
+ #
+        # DataTally used to support a first parameter that was either a string
+ # or a dict.
+ #
+ # The idea was that in most cases, the values to be tallied would be present
+ # as the values of attributes associated with nodes, so it made sense to just
+ # provide the name of the attribute (a string) to identify what to tally.
+ #
+ # However, the code also supported providing an explicit mapping from node_id
+ # to the value to be tallied (a dict). This was useful for testing because
+ # it allowed for tallying values without having to implement an updater that
+ # would be based on a node's attribute. It provided a way to map values that
+ # were not part of the graph to vote totals.
+ #
+ # The problem was that when we started using RX for the embedded graph for
+ # partitions, the node_ids were no longer the same as the ones the user
+ # specified when creating the (NX) graph. This complicated the logic of
+ # having an explicit mapping from node_id to a value to be tallied - to
+ # make this work the code would have needed to translate the node_ids into
+ # the internal RX node_ids.
+ #
+ # The decision was made (Fred and Peter) that this extra complexity was not
+ # worth the trouble, so we now disallow passing in an explicit mapping (dict).
+ #
+
+ for party in self.parties:
+ if isinstance(self.party_names_to_node_attribute_names[party], dict):
+ raise Exception(
+ "Election: Using a map from node_id to vote totals is no longer permitted"
+ )
self.tallies = {
- party: DataTally(self.parties_to_columns[party], party)
+ party: DataTally(self.party_names_to_node_attribute_names[party], party)
for party in self.parties
}
self.updater = ElectionUpdater(self)
+ def _initialize_self(self, partition):
+
+ # Create DataTally objects for each party in the election.
+ self.tallies = {
+ # For each party, create a DataTally using the string for the node
+ # attribute where that party's vote totals can be found.
+ party: DataTally(self.party_names_to_node_attribute_names[party], party)
+ for party in self.parties
+ }
+
def __str__(self):
- return "Election '{}' with vote totals for parties {} from columns {}.".format(
- self.name, str(self.parties), str(self.columns)
+ return (
+ f"Election '{self.name}' with vote totals for parties {self.parties} "
+ f"from node_attribute_names {self.node_attribute_names}."
)
def __repr__(self):
- return "Election(parties={}, columns={}, alias={})".format(
- str(self.parties), str(self.columns), str(self.alias)
+ return "Election(parties={}, node_attribute_names={}, alias={})".format(
+ str(self.parties), str(self.node_attribute_names), str(self.alias)
)
def __call__(self, *args, **kwargs):
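
For reference, the two accepted forms of the renamed parameter (the SEN10 column names below are taken from the PA example used in the perf tests; any real node attributes work):

    from gerrychain import Election

    # List form: the party name doubles as the node-attribute name.
    election = Election("2008 Presidential", ["Dem", "Rep"])

    # Dict form: party names map to the node attributes holding their vote totals.
    election = Election("SEN10", {"Democratic": "SEN10D", "Republican": "SEN10R"}, alias="SEN10")
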
@@ -161,12 +225,14 @@ def get_previous_values(self, partition) -> Dict[str, Dict[int, float]]:
if parent is None:
previous_totals_for_party = {party: None for party in self.election.parties}
else:
- previous_totals_for_party = partition.parent[
- self.election.alias
- ].totals_for_party
+ previous_totals_for_party = partition.parent[self.election.alias].totals_for_party
return previous_totals_for_party
+# frm: TODO: Refactoring: This routine, get_percents(), is only ever used inside ElectionResults.
+#
+# Why is it not defined as an internal function inside ElectionResults?
+#
def get_percents(counts: Dict, totals: Dict) -> Dict:
"""
:param counts: A dictionary mapping each part in a partition to the
@@ -179,10 +245,7 @@ def get_percents(counts: Dict, totals: Dict) -> Dict:
:returns: A dictionary mapping each part in a partition to the percentage
:rtype: Dict
"""
- return {
- part: counts[part] / totals[part] if totals[part] > 0 else math.nan
- for part in totals
- }
+ return {part: counts[part] / totals[part] if totals[part] > 0 else math.nan for part in totals}
class ElectionResults:
@@ -239,8 +302,7 @@ def __init__(
}
self.percents_for_party = {
- party: get_percents(counts[party], self.totals)
- for party in election.parties
+ party: get_percents(counts[party], self.totals) for party in election.parties
}
def __str__(self):
@@ -287,9 +349,7 @@ def percent(self, party: str, region: Optional[int] = None) -> float:
"""
if region is not None:
return self.percents_for_party[party][region]
- return sum(self.votes(party)) / sum(
- self.totals[region] for region in self.regions
- )
+ return sum(self.votes(party)) / sum(self.totals[region] for region in self.regions)
def percents(self, party: str) -> Tuple:
"""
@@ -353,8 +413,7 @@ def won(self, party: str, region: str) -> bool:
:rtype: bool
"""
return all(
- self.totals_for_party[party][region]
- > self.totals_for_party[opponent][region]
+ self.totals_for_party[party][region] > self.totals_for_party[opponent][region]
for opponent in self.election.parties
if opponent != party
)
@@ -422,9 +481,7 @@ def partisan_gini(self) -> float:
return pm.partisan_gini(self)
-def format_part_results(
- percents_for_party: Dict[str, Dict[int, float]], part: int
-) -> str:
+def format_part_results(percents_for_party: Dict[str, Dict[int, float]], part: int) -> str:
"""
:param percents_for_party: A dictionary mapping party names to a dict
containing the percentage of votes that party received in each part
diff --git a/gerrychain/updaters/flows.py b/gerrychain/updaters/flows.py
index bf00096b..b0edcad6 100644
--- a/gerrychain/updaters/flows.py
+++ b/gerrychain/updaters/flows.py
@@ -1,6 +1,10 @@
import collections
import functools
-from typing import Dict, Set, Tuple, Callable
+from typing import Callable, Dict, Set, Tuple
+
+# frm: TODO: Documentation: This file needs documentation / comments!!!
+#
+# Peter agrees...
@functools.lru_cache(maxsize=2)
@@ -36,6 +40,13 @@ def flows_from_changes(old_partition, new_partition) -> Dict:
`{'in': , 'out': }`.
:rtype: Dict
"""
+
+ # frm: TODO: Code: ???: Grok why there is a test for: source != target
+ #
+ # It would seem to me that it would be a logic bug if there
+ # was a "flip" that did not in fact change the partition mapping...
+ #
+
flows = collections.defaultdict(create_flow)
for node, target in new_partition.flips.items():
source = old_partition.assignment.mapping[node]
@@ -95,9 +106,7 @@ def wrapped(partition, previous=None):
new_values = previous.copy()
for part, flow in partition.flows.items():
- new_values[part] = function(
- partition, previous[part], flow["in"], flow["out"]
- )
+ new_values[part] = function(partition, previous[part], flow["in"], flow["out"])
return new_values
@@ -129,18 +138,40 @@ def compute_edge_flows(partition) -> Dict:
new_source = assignment.mapping[node]
new_target = assignment.mapping[neighbor]
- cut = new_source != new_target
- was_cut = old_source != old_target
+ # frm: Clarification to myself...
+ # A "cut edge" is one where the nodes in the edge are assigned to different
+ # districts. So, how does a flip change whether an edge is a cut edge? There
+ # are three possibilities: 1) the edge goes from not being a cut edge to being
+ # a cut edge, 2) the edge goes from being a cut edge to not being a cut edge,
+ # and 3) the edge was a cut edge before and is still a cut edge after the flip,
+        # but the part assignment of one or both of the edge's endpoints changes.
+ #
+ # That is what the if-stmt below is doing - determining which of the three
+ # cases each flip falls into. It updates the flows accordingly...
+ #
+ cut = new_source != new_target # after flip, the edge is a cut edge
+ was_cut = old_source != old_target # before flip, the edge was a cut edge
if not cut and was_cut:
+ # was a cut edge before, but now is not, so flows out of both
edge_flows[old_target]["out"].add(edge)
edge_flows[old_source]["out"].add(edge)
elif cut and not was_cut:
+ # was not a cut edge before, but now is, so flows into both
edge_flows[new_target]["in"].add(edge)
edge_flows[new_source]["in"].add(edge)
elif cut and was_cut:
# If an edge was cut and still is cut, we need to make sure the
# edge is listed under the correct parts.
+            # frm: Clarification to myself... Python set subtraction removes
+            #       from the set on the left any members of the set on the right,
+            #       so no_longer_incident_parts determines whether old_target or
+            #       old_source is no longer incident to the edge - that is, whether
+            #       the assignment of one of the old endpoints has changed - if so,
+            #       the edge has flowed "out" of that part. If you do the subtraction
+            #       the other way, you find which parts are newly incident and can
+            #       then update the "in" flows.
+            #
no_longer_incident_parts = {old_target, old_source} - {
new_target,
new_source,
@@ -151,6 +182,7 @@ def compute_edge_flows(partition) -> Dict:
newly_incident_parts = {new_target, new_source} - {old_target, old_source}
for part in newly_incident_parts:
edge_flows[part]["in"].add(edge)
+
return edge_flows
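
A compact restatement of the three cases handled by the if/elif chain above (a pure-Python sketch, independent of the real updater types):

    def classify(old_src, old_tgt, new_src, new_tgt):
        was_cut = old_src != old_tgt  # cut before the flip
        cut = new_src != new_tgt      # cut after the flip
        if cut and not was_cut:
            return "in"       # edge became a cut edge: flows into both new parts
        if was_cut and not cut:
            return "out"      # edge stopped being a cut edge: flows out of both old parts
        if cut and was_cut:
            return "relabel"  # still cut, but its incident parts may have changed
        return "none"

    print(classify("A", "A", "A", "B"))  # in
    print(classify("A", "B", "B", "B"))  # out
    print(classify("A", "B", "A", "C"))  # relabel
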
diff --git a/gerrychain/updaters/locality_split_scores.py b/gerrychain/updaters/locality_split_scores.py
index 28720b2f..f1418781 100644
--- a/gerrychain/updaters/locality_split_scores.py
+++ b/gerrychain/updaters/locality_split_scores.py
@@ -1,9 +1,20 @@
# Imports
-from collections import defaultdict, Counter
-import networkx as nx
import math
+from collections import Counter, defaultdict
from typing import List
+# frm TODO: Refactoring: Remove dependence on NetworkX.
+#      The only use was:
+#          pieces += nx.number_connected_components(subgraph)
+#      which is now replaced by subgraph.num_connected_components()
+
+# frm: TODO: Performance: Do performance testing and improve performance of these routines.
+#
+# Peter made the comment in a PR that we should make this code more efficient:
+#
+# A note on this file: A ton of the code in here is inefficient. This was
+# made 6 years ago and hasn't really been touched since then other than
+# when I was doing an overhaul on many of the doc strings
+
class LocalitySplits:
"""
@@ -134,13 +145,31 @@ def __init__(
def __call__(self, partition):
+ # frm: TODO: Refactoring: LocalitySplits: Figure out how this is intended to be used...
+ #
+        # Not quite sure why it is better to have a __call__() method instead of a
+        # get_scores(self) method, but whatever...
+        #
+        # This routine indeed just computes the requested scores (specified in the constructor).
+        # It stashes those scores as a data member on the class and returns them to the caller as
+        # well.
+ #
+ # This all seems kind of misguided to me - and there is no instance of this being used in
+ # the gerrychain code except in a test, so I am not sure how it is intended to be used.
+ #
+ # Probably need to look at some user code that Peter sent me to see if anyone actually uses
+ # this and if so, how...
+ #
+
if self.localities == []:
- self.localitydict = dict(partition.graph.nodes(data=self.col_id))
+ self.localitydict = {}
+ for node_id in partition.graph.node_indices:
+ self.localitydict[node_id] = partition.graph.node_data(node_id)[self.col_id]
+
self.localities = set(list(self.localitydict.values()))
locality_splits = {
- k: [self.localitydict[v] for v in d]
- for k, d in partition.assignment.parts.items()
+ k: [self.localitydict[v] for v in d] for k, d in partition.assignment.parts.items()
}
self.locality_splits = {k: Counter(v) for k, v in locality_splits.items()}
@@ -154,23 +183,54 @@ def __call__(self, partition):
allowed_pieces = {}
totpop = 0
- for node in partition.graph.nodes:
- totpop += partition.graph.nodes[node][self.pop_col]
+ for node_id in partition.graph.node_indices:
+ # frm: TODO: Refactoring: Once you have a partition, you cannot change the
+ # total population in the Partition, so why don't we cache the total
+ # population as a data member in Partition?
+ #
+ # Peter agreed that this would be a good thing to do
- num_districts = len(partition.assignment.parts.keys())
+ totpop += partition.graph.node_data(node_id)[self.pop_col]
- for loc in self.localities:
- sg = partition.graph.subgraph(
- n
- for n, v in partition.graph.nodes(data=True)
- if v[self.col_id] == loc
- )
+ # frm: TODO: Refactoring: Ditto with num_districts - isn't this a constant once you
+ # create a Partition?
+ #
+ # Peter agreed that this would be a good thing to do.
- pop = 0
- for n in sg.nodes():
- pop += sg.nodes[n][self.pop_col]
+ num_districts = len(partition.assignment.parts.keys())
+
+ # Compute the total population for each locality and then the number of
+ # "allowed pieces"
+        # frm: TODO: Refactoring: The code below just calculates the total population
+        #       for a set of nodes. This sounds like a good candidate for a utility
+        #       function. See if this logic is repeated elsewhere...
+
+        # Compute the population associated with each locality
+        the_graph = partition.graph
+        locality_population = {}  # dict mapping locality name to population in that locality
+ for node_id in the_graph.node_indices:
+ locality_name = the_graph.node_data(node_id)[self.col_id]
+ locality_pop = the_graph.node_data(node_id)[self.pop_col]
+ if locality_name not in locality_population:
+ locality_population[locality_name] = locality_pop
+ else:
+ locality_population[locality_name] += locality_pop
+
+ # frm: TODO: Refactoring: Peter commented (in PR) that this is another thing that
+ # could be cached so we didn't recompute it over and over...
+ ideal_population_per_district = totpop / num_districts
+
+ # Compute the number of "allowed pieces" for each locality
+ allowed_pieces = {}
+ for locality_name in locality_population.keys():
+ pop_for_locality = locality_population[locality_name]
+ allowed_pieces[locality_name] = math.ceil(
+ pop_for_locality / ideal_population_per_district
+ )
- allowed_pieces[loc] = math.ceil(pop / (totpop / num_districts))
self.allowed_pieces = allowed_pieces
for s in self.scores:
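
A worked example of the allowed-pieces rule implemented above (toy numbers, not real data): a locality whose population exceeds the ideal district population must be split into at least ceil(pop / ideal) pieces.

    import math

    locality_population = {"Cty1": 180, "Cty2": 40}
    totpop = sum(locality_population.values())  # 220
    num_districts = 2
    ideal = totpop / num_districts              # 110.0

    allowed_pieces = {name: math.ceil(pop / ideal) for name, pop in locality_population.items()}
    print(allowed_pieces)  # {'Cty1': 2, 'Cty2': 1}
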
@@ -227,12 +287,10 @@ def num_pieces(self, partition) -> int:
"""
locality_intersections = {}
- for n in partition.graph.nodes():
- locality = partition.graph.nodes[n][self.col_id]
+ for n in partition.graph.node_indices:
+ locality = partition.graph.node_data(n)[self.col_id]
if locality not in locality_intersections:
- locality_intersections[locality] = set(
- [partition.assignment.mapping[n]]
- )
+ locality_intersections[locality] = set([partition.assignment.mapping[n]])
locality_intersections[locality].update([partition.assignment.mapping[n]])
@@ -243,11 +301,11 @@ def num_pieces(self, partition) -> int:
[
x
for x in partition.parts[d]
- if partition.graph.nodes[x][self.col_id] == locality
+ if partition.graph.node_data(x)[self.col_id] == locality
]
)
- pieces += nx.number_connected_components(subgraph)
+ pieces += subgraph.num_connected_components()
return pieces
def naked_boundary(self, partition) -> int:
@@ -285,7 +343,7 @@ def shannon_entropy(self, partition) -> float:
"""
total_vtds = 0
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
for x in list(v.values()):
total_vtds += x
@@ -293,7 +351,7 @@ def shannon_entropy(self, partition) -> float:
for locality_j in self.localities: # iter thru locs to get total count
tot_county_vtds = 0
# iter thru counters
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
v = dict(v)
if locality_j in list(v.keys()):
tot_county_vtds += v[locality_j]
@@ -303,7 +361,7 @@ def shannon_entropy(self, partition) -> float:
# iter thru districts to get vtds in county in district
# for district in range(num_districts):
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
# counter = dict(locality_splits[district+1])
count = dict(v)
if locality_j in count:
@@ -328,7 +386,7 @@ def power_entropy(self, partition) -> float:
"""
total_vtds = 0 # count the total number of vtds in state
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
for x in list(v.values()):
total_vtds += x
@@ -336,7 +394,7 @@ def power_entropy(self, partition) -> float:
for locality_j in self.localities: # iter thru locs to get total count
tot_county_vtds = 0
# iter thru counters
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
v = dict(v)
if locality_j in list(v.keys()):
tot_county_vtds += v[locality_j]
@@ -346,7 +404,7 @@ def power_entropy(self, partition) -> float:
q = tot_county_vtds / total_vtds
# iter thru districts to get vtds in county in district
# for district in range(num_districts):
- for k, v in self.locality_splits.items():
+ for v in self.locality_splits.values():
# counter = dict(locality_splits[district+1])
count = dict(v)
if locality_j in count:
@@ -380,9 +438,7 @@ def symmetric_entropy(self, partition) -> float: # IN PROGRESS
vtds = district_dict[district]
locality_pop = {k: 0 for k in self.localities}
for vtd in vtds:
- locality_pop[self.localitydict[vtd]] += partition.graph.nodes[vtd][
- self.pop_col
- ]
+ locality_pop[self.localitydict[vtd]] += partition.graph.node_data(vtd)[self.pop_col]
district_dict[district] = locality_pop
district_dict_inv = defaultdict(dict)
diff --git a/gerrychain/updaters/spanning_trees.py b/gerrychain/updaters/spanning_trees.py
index 307daf40..6dc39eed 100644
--- a/gerrychain/updaters/spanning_trees.py
+++ b/gerrychain/updaters/spanning_trees.py
@@ -3,10 +3,10 @@
"""
import math
-import numpy
-import networkx
from typing import Dict
+import numpy
+
def _num_spanning_trees_in_district(partition, district: int) -> int:
"""
@@ -24,8 +24,7 @@ def _num_spanning_trees_in_district(partition, district: int) -> int:
partition corresponding to district
:rtype: int
"""
- graph = partition.subgraphs[district]
- laplacian = networkx.laplacian_matrix(graph)
+ laplacian = partition.graph.laplacian_matrix()
L = numpy.delete(numpy.delete(laplacian.todense(), 0, 0), 1, 1)
return math.exp(numpy.linalg.slogdet(L)[1])
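
The computation above is Kirchhoff's matrix-tree theorem: the number of spanning trees equals any cofactor of the graph Laplacian, and slogdet keeps the determinant numerically stable. A small self-contained check (only numpy assumed; a 4-cycle has exactly 4 spanning trees):

    import numpy as np

    # 4-cycle 0-1-2-3-0
    A = np.array([[0, 1, 0, 1],
                  [1, 0, 1, 0],
                  [0, 1, 0, 1],
                  [1, 0, 1, 0]])
    L = np.diag(A.sum(axis=1)) - A               # Laplacian = D - A
    minor = np.delete(np.delete(L, 0, 0), 0, 1)  # delete row 0 and column 0
    print(round(np.exp(np.linalg.slogdet(minor)[1])))  # 4
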
@@ -35,7 +34,4 @@ def num_spanning_trees(partition) -> Dict[int, int]:
:returns: The number of spanning trees in each part (district) of a partition.
:rtype: Dict[int, int]
"""
- return {
- part: _num_spanning_trees_in_district(partition, part)
- for part in partition.parts
- }
+ return {part: _num_spanning_trees_in_district(partition, part) for part in partition.parts}
diff --git a/gerrychain/updaters/tally.py b/gerrychain/updaters/tally.py
index 97305b38..e4e10208 100644
--- a/gerrychain/updaters/tally.py
+++ b/gerrychain/updaters/tally.py
@@ -1,11 +1,12 @@
import collections
import math
import warnings
+from typing import Dict, List, Optional, Type, Union
-from .flows import flows_from_changes, on_flow
-from typing import Dict, Union, List, Optional, Type
import pandas
+from .flows import flows_from_changes, on_flow
+
class DataTally:
"""
@@ -19,6 +20,16 @@ class DataTally:
:type alias: str
"""
+ # frm: TODO: Code: Check to see if DataTally used for data that is NOT attribute of a node
+ #
+ # The comment above indicates that you can use a DataTally for data that is not stored
+ # as an attribute of a node. Check to see if it is ever actually used that way. If so,
+ # then update the documentation above to state the use cases for adding up data that is
+ # NOT stored as a node attribute...
+ #
+ # It appears that some tests use the ability to specify tallies that do not involve a
+ # node attribute, but it is not clear if any "real" code does that...
+
__slots__ = ["data", "alias", "_call"]
def __init__(self, data: Union[Dict, pandas.Series, str], alias: str) -> None:
@@ -35,23 +46,40 @@ def __init__(self, data: Union[Dict, pandas.Series, str], alias: str) -> None:
self.alias = alias
def initialize_tally(partition):
+
+ # If the "data" passed in was a string, then interpret that string
+ # as the name of a node attribute in the graph, and construct
+            # a dict of the form: {node_id: node_attribute_value}
+ #
+ # If not, then assume that the "data" passed in is already of the
+ # form: {node_id: data_value}
+
if isinstance(self.data, str):
- nodes = partition.graph.nodes
+
+ # if the "data" passed in was a string, then replace its value with
+ # a dict of {node_id: attribute_value of the node}
+ graph = partition.graph
+ node_ids = partition.graph.node_indices
attribute = self.data
- self.data = {node: nodes[node][attribute] for node in nodes}
+ self.data = {node_id: graph.node_data(node_id)[attribute] for node_id in node_ids}
tally = collections.defaultdict(int)
- for node, part in partition.assignment.items():
- add = self.data[node]
+ for node_id, part in partition.assignment.items():
+ add = self.data[node_id]
+ # Note: math.isnan() will raise an exception if the value passed in is not
+ # numeric, so there is no need to do another check to ensure that the value
+ # is numeric - that test is implicit in math.isnan()
+ #
if math.isnan(add):
warnings.warn(
- "ignoring nan encountered at node '{}' for attribute '{}'".format(
- node, self.alias
+ "ignoring nan encountered at node_id '{}' for attribute '{}'".format(
+ node_id, self.alias
)
)
else:
tally[part] += add
+
return dict(tally)
@on_flow(initialize_tally, alias=alias)
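
What initialize_tally computes is just a group-by-part sum; a minimal standalone sketch with plain dicts in place of the real classes:

    import collections

    data = {0: 10, 1: 20, 2: 5, 3: 7}              # node_id -> value to tally
    assignment = {0: "A", 1: "A", 2: "B", 3: "B"}  # node_id -> part

    tally = collections.defaultdict(int)
    for node_id, part in assignment.items():
        tally[part] += data[node_id]
    print(dict(tally))  # {'A': 30, 'B': 12}
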
@@ -167,7 +195,7 @@ def _update_tally(self, partition):
return new_tally
def _get_tally_from_node(self, partition, node):
- return sum(partition.graph.lookup(node, field) for field in self.fields)
+ return sum(partition.graph.node_data(node)[field] for field in self.fields)
def compute_out_flow(graph, fields: Union[str, List[str]], flow: Dict) -> int:
@@ -185,7 +213,7 @@ def compute_out_flow(graph, fields: Union[str, List[str]], flow: Dict) -> int:
:returns: The sum of the "field" attribute of nodes in the "out" set of the flow.
:rtype: int
"""
- return sum(graph.lookup(node, field) for node in flow["out"] for field in fields)
+ return sum(graph.node_data(node)[field] for node in flow["out"] for field in fields)
def compute_in_flow(graph, fields: Union[str, List[str]], flow: Dict) -> int:
@@ -203,4 +231,4 @@ def compute_in_flow(graph, fields: Union[str, List[str]], flow: Dict) -> int:
:returns: The sum of the "field" attribute of nodes in the "in" set of the flow.
:rtype: int
"""
- return sum(graph.lookup(node, field) for node in flow["in"] for field in fields)
+ return sum(graph.node_data(node)[field] for node in flow["in"] for field in fields)
diff --git a/gerrychain/vendor/utm/__init__.py b/gerrychain/vendor/utm/__init__.py
index 45c5ec47..b42faf24 100644
--- a/gerrychain/vendor/utm/__init__.py
+++ b/gerrychain/vendor/utm/__init__.py
@@ -1,8 +1,8 @@
from .conversion import (
- to_latlon,
+ check_valid_zone,
from_latlon,
- latlon_to_zone_number,
latitude_to_zone_letter,
- check_valid_zone,
+ latlon_to_zone_number,
+ to_latlon,
)
from .error import OutOfRangeError
diff --git a/gerrychain/vendor/utm/conversion.py b/gerrychain/vendor/utm/conversion.py
index 827a7a1c..8b52b94b 100644
--- a/gerrychain/vendor/utm/conversion.py
+++ b/gerrychain/vendor/utm/conversion.py
@@ -73,9 +73,7 @@ def negative(x):
return x < 0
-def to_latlon(
- easting, northing, zone_number, zone_letter=None, northern=None, strict=True
-):
+def to_latlon(easting, northing, zone_number, zone_letter=None, northern=None, strict=True):
"""This function convert an UTM coordinate into Latitude and Longitude
Parameters
@@ -109,13 +107,9 @@ def to_latlon(
if strict:
if not in_bounds(easting, 100000, 1000000, upper_strict=True):
- raise OutOfRangeError(
- "easting out of range (must be between 100.000 m and 999.999 m)"
- )
+ raise OutOfRangeError("easting out of range (must be between 100.000 m and 999.999 m)")
if not in_bounds(northing, 0, 10000000):
- raise OutOfRangeError(
- "northing out of range (must be between 0 m and 10.000.000 m)"
- )
+ raise OutOfRangeError("northing out of range (must be between 0 m and 10.000.000 m)")
check_valid_zone(zone_number, zone_letter)
@@ -167,8 +161,7 @@ def to_latlon(
latitude = (
p_rad
- - (p_tan / r)
- * (d2 / 2 - d4 / 24 * (5 + 3 * p_tan2 + 10 * c - 4 * c2 - 9 * E_P2))
+ - (p_tan / r) * (d2 / 2 - d4 / 24 * (5 + 3 * p_tan2 + 10 * c - 4 * c2 - 9 * E_P2))
+ d6 / 720 * (61 + 90 * p_tan2 + 298 * c + 45 * p_tan4 - 252 * E_P2 - 3 * c2)
)
@@ -203,13 +196,9 @@ def from_latlon(latitude, longitude, force_zone_number=None, force_zone_letter=N
.. _[1]: http://www.jaworski.ca/utmzones.htm
"""
if not in_bounds(latitude, -80.0, 84.0):
- raise OutOfRangeError(
- "latitude out of range (must be between 80 deg S and 84 deg N)"
- )
+ raise OutOfRangeError("latitude out of range (must be between 80 deg S and 84 deg N)")
if not in_bounds(longitude, -180.0, 180.0):
- raise OutOfRangeError(
- "longitude out of range (must be between 180 deg W and 180 deg E)"
- )
+ raise OutOfRangeError("longitude out of range (must be between 180 deg W and 180 deg E)")
if force_zone_number is not None:
check_valid_zone(force_zone_number, force_zone_letter)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..38b72260
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,102 @@
+[build-system]
+requires = ["setuptools>=80.0"]
+build-backend = "setuptools.build_meta"
+
+
+[tool.setuptools]
+packages = { find = { include = ["gerrychain*"], exclude = ["tests*"] } }
+
+[project]
+name = "gerrychain"
+version = "1.0.0a2"
+description = "GerryChain is a Python library for building ensembles of districting plans using Markov Chain Monte Carlo methods."
+readme = "README.rst"
+authors = [
+ { name = "Data and Democracy Lab", email = "code@mggg.org" },
+]
+requires-python = ">=3.11"
+license = { text = "MIT" }
+
+
+dependencies = [
+ "geopandas>=1.1.1",
+ "networkx>=3.6.1",
+ "numpy>=2.3.5",
+ "pandas>=2.3.3",
+ "rustworkx>=0.17.1",
+ "scipy>=1.16.3",
+ "shapely>=2.1.2",
+ "tqdm>=4.67.1",
+]
+
+[tool.black]
+line-length = 100
+
+[tool.isort]
+profile = "black"
+line_length = 100
+multi_line_output = 3
+include_trailing_comma = true
+
+[tool.ruff]
+line-length = 100
+extend-exclude = [
+ "tests",
+ "**/__init__.py",
+ "gerrychain/vendor"
+]
+
+[tool.ruff.lint]
+select = ["E", "W", "F"]
+ignore = [
+ "E501", # line length handled by black
+ "E203", # whitespace before ':', handled by black
+ "E731", # do not assign lambda expression
+]
+task-tags = ["TODO", "FIXME", "XXX", "HACK", "NOTE"]
+
+[tool.ruff.lint.pycodestyle]
+ignore-overlong-task-comments = true
+
+[dependency-groups]
+dev = [
+ # Linters, formatters, type checkers
+ "black>=25.1.0",
+ "isort>=6.0.1",
+ "autopep8>=2.3.2",
+ "myst-parser>=4.0.1",
+
+    # Interactive kernel
+ "ipykernel>=6.30.1",
+ "ipywidgets>=8.1.7",
+
+ # Testing
+ "pytest>=8.4.2",
+ "pytest-cov>=6.3.0",
+
+ # Other
+ "pre-commit>=4.3.0",
+]
+docs = [
+ "myst-parser>=4.0.1",
+ "recommonmark>=0.7.1",
+ "sphinx>=8.2.3",
+ "sphinx-copybutton>=0.5.2",
+ "sphinx-rtd-theme>=3.0.2",
+]
+frm = [
+ "matplotlib>=3.10.7",
+]
+
+
+[[tool.uv.index]]
+name = "testpypi"
+url = "https://test.pypi.org/simple/"
+publish-url = "https://test.pypi.org/legacy/"
+explicit = true
+
+[[tool.uv.index]]
+name = "pypi"
+url = "https://pypi.org/simple/"
+publish-url = "https://upload.pypi.org/legacy/"
+explicit = true
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index cad6d8cf..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,37 +0,0 @@
-[flake8]
-max-line-length = 100
-ignore = E122,E123,E126,E127,E128,E731,E722,W503,W504
-exclude = build,gerrychain/_version.py,tests,conda.recipe,.git,versioneer.py,benchmarks,.asv,__init__.py,gerrychain/vendor/*
-
-[tool:pytest]
-norecursedirs= .* *.egg* build dist conda.recipe
-addopts =
- --ignore setup.py
- --cov=gerrychain
- --cov-report term-missing
- --tb native
- --strict
- --durations=20
-env =
- PYTHONHASHSEED=0
-markers =
- serial: execute test serially (to avoid race conditions)
-
-[versioneer]
-VCS = git
-versionfile_source = gerrychain/_version.py
-versionfile_build = gerrychain/_version.py
-tag_prefix = v
-parentdir_prefix = gerrychain-
-
-[bdist_wheel]
-universal=1
-
-[coverage:report]
-omit =
- setup.py
- versioneer.py
- gerrychain/__main__.py
- gerrychain/_version.py
- gerrychain/vendor/*
- tests/*
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 45bf7bab..00000000
--- a/setup.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from setuptools import find_packages, setup
-
-import versioneer
-
-with open("./README.rst") as f:
- long_description = f.read()
-
-requirements = [
- # package requirements go here
- "pandas",
- "scipy",
- "networkx",
- "matplotlib",
-]
-
-setup(
- name="gerrychain",
- description="Use Markov chain Monte Carlo to analyze districting plans and gerrymanders",
- author="Metric Geometry and Gerrymandering Group",
- author_email="engineering@mggg.org",
- maintainer="Metric Geometry and Gerrymandering Group",
- maintainer_email="engineering@mggg.org",
- long_description=long_description,
- long_description_content_type="text/x-rst",
- url="https://github.com/mggg/GerryChain",
- packages=find_packages(exclude=("tests",)),
- version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(),
- install_requires=requirements,
- keywords="GerryChain",
- classifiers=[
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
- "Operating System :: OS Independent",
- "License :: OSI Approved :: BSD License",
- ],
- extras_require={
- 'geo': ["shapely>=2.0.1", "geopandas>=0.12.2"]
- }
-)
diff --git a/tests/README.txt b/tests/README.txt
new file mode 100644
index 00000000..b2a0024e
--- /dev/null
+++ b/tests/README.txt
@@ -0,0 +1,6 @@
+This folder contains tests (and subfolders that also contain tests).
+
+As a convention (at least for now - October 2025), tests that Fred
+Mueller adds will be named test_frm_... Eventually the names should
+be changed to have the "_frm" deleted, but for now it will help
+identify big changes in testing.
diff --git a/tests/_foo/do_laplacian.py b/tests/_foo/do_laplacian.py
new file mode 100644
index 00000000..c0ea775f
--- /dev/null
+++ b/tests/_foo/do_laplacian.py
@@ -0,0 +1,46 @@
+import networkx as nx
+import numpy as np
+import rustworkx as rx
+from graph import Graph
+
+# Create an RX graph (replace with your graph data)
+rx_graph = rx.PyGraph()
+rx_graph.add_nodes_from([0, 1, 2, 3])
+rx_graph.add_edges_from([(0, 1, "data"), (0, 2, "data"), (1, 2, "data"), (2, 3, "data")])
+
+# 1. Get the adjacency matrix
+adj_matrix = rx.adjacency_matrix(rx_graph)
+
+# 2. Calculate the degree matrix (simplified for this example)
+degree_matrix = np.diag([rx_graph.degree(node) for node in rx_graph.node_indices()])
+
+# 3. Calculate the Laplacian matrix
+rx_laplacian_matrix = degree_matrix - adj_matrix
+
+# frm: TODO: Debugging: Remove Debugging Code
+
+# print("RX Adjacency Matrix:")
+# print(adj_matrix)
+
+# print("\nRX Degree Matrix:")
+# print(degree_matrix)
+
+# print("\nRX Laplacian Matrix:")
+# print(rx_laplacian_matrix)
+
+# print("type of RX laplacian_matrix is: ", type(rx_laplacian_matrix))
+
+# Create an NX graph (replace with your graph data)
+nx_graph = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3)])
+nx_laplacian_matrix = nx.laplacian_matrix(nx_graph)
+
+# print("\nNX Laplacian Matrix:")
+# print(nx_laplacian_matrix)
+
+# print("type of NX laplacian_matrix is: ", type(nx_laplacian_matrix))
+
+gc_nx_graph = Graph.from_nx_graph(nx_graph)
+gc_rx_graph = Graph.from_rx_graph(rx_graph)
+
+# print("\ngc_laplacian(nx_graph) is: ", gctree.gc_laplacian_matrix(gc_nx_graph))
+# print("\ngc_laplacian(rx_graph) is: ", gctree.gc_laplacian_matrix(gc_rx_graph))
diff --git a/tests/_perf_tests/perf_test.py b/tests/_perf_tests/perf_test.py
new file mode 100644
index 00000000..7d8290da
--- /dev/null
+++ b/tests/_perf_tests/perf_test.py
@@ -0,0 +1,55 @@
+# Code copied from the GerryChain User Guide / Tutorial:
+
+import cProfile
+
+# Set the random seed so that the results are reproducible!
+import random
+from functools import partial
+
+from gerrychain import Graph, MarkovChain, Partition, accept, updaters
+from gerrychain.constraints import contiguous
+from gerrychain.proposals import recom
+
+
+def main():
+
+ random.seed(2024)
+ graph = Graph.from_json("./gerrymandria.json")
+
+ my_updaters = {
+ "population": updaters.Tally("TOTPOP"),
+ "cut_edges": updaters.cut_edges,
+ }
+
+ initial_partition = Partition(graph, assignment="district", updaters=my_updaters)
+
+ # This should be 8 since each district has 1 person in it.
+ # Note that the key "population" corresponds to the population updater
+ # that we defined above and not with the population column in the json file.
+ ideal_population = sum(initial_partition["population"].values()) / len(initial_partition)
+
+ proposal = partial(
+ recom,
+ pop_col="TOTPOP",
+ pop_target=ideal_population,
+ epsilon=0.01,
+ node_repeats=2,
+ )
+
+ recom_chain = MarkovChain(
+ proposal=proposal,
+ constraints=[contiguous],
+ accept=accept.always_accept,
+ initial_state=initial_partition,
+ total_steps=40,
+ )
+
+ assignment_list = []
+
+ for i, item in enumerate(recom_chain):
+ print(f"Finished step {i+1}/{len(recom_chain)}", end="\r")
+ assignment_list.append(item.assignment)
+
+
+if __name__ == "__main__":
+ cProfile.run("main()", sort="tottime")
diff --git a/tests/_perf_tests/perf_test2.py b/tests/_perf_tests/perf_test2.py
new file mode 100644
index 00000000..fb50c1de
--- /dev/null
+++ b/tests/_perf_tests/perf_test2.py
@@ -0,0 +1,86 @@
+import cProfile
+import sys
+from functools import partial
+
+from gerrychain import (
+ Election,
+ GeographicPartition,
+ Graph,
+ MarkovChain,
+ accept,
+ constraints,
+ updaters,
+)
+from gerrychain.proposals import recom
+
+
+def main():
+
+ graph = Graph.from_json("./PA_VTDs.json")
+
+ elections = [
+ Election("SEN10", {"Democratic": "SEN10D", "Republican": "SEN10R"}),
+ Election("SEN12", {"Democratic": "USS12D", "Republican": "USS12R"}),
+ Election("SEN16", {"Democratic": "T16SEND", "Republican": "T16SENR"}),
+ Election("PRES12", {"Democratic": "PRES12D", "Republican": "PRES12R"}),
+ Election("PRES16", {"Democratic": "T16PRESD", "Republican": "T16PRESR"}),
+ ]
+
+ # Population updater, for computing how close to equality the district
+ # populations are. "TOTPOP" is the population column from our shapefile.
+ my_updaters = {"population": updaters.Tally("TOT_POP", alias="population")}
+
+ # Election updaters, for computing election results using the vote totals
+ # from our shapefile.
+ election_updaters = {election.name: election for election in elections}
+ my_updaters.update(election_updaters)
+
+ initial_partition = GeographicPartition(
+ graph,
+ assignment="2011_PLA_1", # This identifies the district plan in 2011
+ updaters=my_updaters,
+ )
+
+ # The ReCom proposal needs to know the ideal population for the districts so that
+ # we can improve speed by bailing early on unbalanced partitions.
+
+ ideal_population = sum(initial_partition["population"].values()) / len(initial_partition)
+
+ # We use functools.partial to bind the extra parameters (pop_col, pop_target, epsilon, node_repeats)
+ # of the recom proposal.
+ proposal = partial(
+ recom,
+ pop_col="TOT_POP",
+ pop_target=ideal_population,
+ epsilon=0.02,
+ node_repeats=2,
+ )
+
+ def cut_edges_length(p):
+ return len(p["cut_edges"])
+
+ compactness_bound = constraints.UpperBound(
+ cut_edges_length, 2 * len(initial_partition["cut_edges"])
+ )
+
+ pop_constraint = constraints.within_percent_of_ideal_population(initial_partition, 0.02)
+
+ print("About to call MarkovChain", file=sys.stderr)
+
+ chain = MarkovChain(
+ proposal=proposal,
+ constraints=[pop_constraint, compactness_bound],
+ accept=accept.always_accept,
+ initial_state=initial_partition,
+ total_steps=1000,
+ )
+
+ print("Done with calling MarkovChain", file=sys.stderr)
+
+ print("About to get all assignments from the chain", file=sys.stderr)
+ _ = list(chain)
+ print("Done getting all assignments from the chain", file=sys.stderr)
+
+
+if __name__ == "__main__":
+ cProfile.run("main()", sort="tottime")
diff --git a/tests/conftest.py b/tests/conftest.py
index 501906ab..6e904454 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,9 +1,10 @@
+import random
+
+import networkx as nx
import pytest
from gerrychain import Graph, Partition
-import random
from gerrychain.updaters import cut_edges
-import networkx as nx
random.seed(2018)
@@ -15,8 +16,8 @@ def three_by_three_grid():
3 4 5
6 7 8
"""
- graph = Graph()
- graph.add_edges_from(
+ nx_graph = nx.Graph()
+ nx_graph.add_edges_from(
[
(0, 1),
(0, 3),
@@ -32,7 +33,7 @@ def three_by_three_grid():
(7, 8),
]
)
- return graph
+ return Graph.from_networkx(nx_graph)
@pytest.fixture
@@ -47,8 +48,8 @@ def four_by_five_grid_for_opt():
# 5 6 7 8 9
# 0 1 2 3 4
- graph = Graph()
- graph.add_nodes_from(
+ nx_graph = nx.Graph()
+ nx_graph.add_nodes_from(
[
(0, {"population": 10, "opt_value": 1, "MVAP": 2}),
(1, {"population": 10, "opt_value": 1, "MVAP": 2}),
@@ -73,7 +74,7 @@ def four_by_five_grid_for_opt():
]
)
- graph.add_edges_from(
+ nx_graph.add_edges_from(
[
(0, 1),
(0, 5),
@@ -109,26 +110,35 @@ def four_by_five_grid_for_opt():
]
)
- return graph
+ return Graph.from_networkx(nx_graph)
@pytest.fixture
def graph_with_random_data_factory(three_by_three_grid):
+
def factory(columns):
graph = three_by_three_grid
attach_random_data(graph, columns)
return graph
+ # A closure - will add random data (int) to all nodes for each named "column"
return factory
+# frm: TODO: Refactoring: This routine is only ever used immediately above in def factory(columns).
+# Is it part of the external API? If not, then it should be moved inside
+# the graph_with_random_data_factory() routine
def attach_random_data(graph, columns):
for node in graph.nodes:
for col in columns:
- graph.nodes[node][col] = random.randint(1, 1000)
+ graph.node_data(node)[col] = random.randint(1, 1000)
@pytest.fixture
+# frm: ???: Why not just always use three_by_three_grid? At least that gives
+# the reader an idea of how many nodes there are? What is the
+# value of just having a generic "graph" test fixture???
+#
def graph(three_by_three_grid):
return three_by_three_grid
@@ -143,9 +153,7 @@ def example_partition():
# From the docs: https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
def pytest_addoption(parser):
- parser.addoption(
- "--runslow", action="store_true", default=False, help="run slow tests"
- )
+ parser.addoption("--runslow", action="store_true", default=False, help="run slow tests")
def pytest_configure(config):
diff --git a/tests/constraints/test_contiguity.py b/tests/constraints/test_contiguity.py
index b94f1b5d..87ade1d7 100644
--- a/tests/constraints/test_contiguity.py
+++ b/tests/constraints/test_contiguity.py
@@ -3,6 +3,7 @@
def test_contiguous_components(graph):
+
partition = Partition(graph, {0: 1, 1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 1, 7: 1, 8: 1})
components = contiguous_components(partition)
@@ -10,9 +11,14 @@ def test_contiguous_components(graph):
assert len(components[1]) == 2
assert len(components[2]) == 1
- assert set(frozenset(g.nodes) for g in components[1]) == {
+ # Confirm that the appropriate connected subgraphs were found. Note that we need
+ # to compare against the original node_ids, since RX node_ids change every time
+ # you create a subgraph.
+
+ assert set(frozenset(g.original_nx_node_ids_for_set(g.nodes)) for g in components[1]) == {
frozenset([0, 1, 2]),
frozenset([6, 7, 8]),
}
-
- assert set(components[2][0].nodes) == {3, 4, 5}
+ assert set(frozenset(g.original_nx_node_ids_for_set(g.nodes)) for g in components[2]) == {
+ frozenset([3, 4, 5]),
+ }
diff --git a/tests/constraints/test_validity.py b/tests/constraints/test_validity.py
index 7bc3e01d..d6e88a11 100644
--- a/tests/constraints/test_validity.py
+++ b/tests/constraints/test_validity.py
@@ -4,15 +4,18 @@
import numpy
import pytest
-from gerrychain.constraints import (SelfConfiguringLowerBound, Validator,
- contiguous, contiguous_bfs,
- districts_within_tolerance,
- no_vanishing_districts,
- single_flip_contiguous,
- deviation_from_ideal)
+from gerrychain.constraints import (
+ SelfConfiguringLowerBound,
+ Validator,
+ contiguous,
+ deviation_from_ideal,
+ districts_within_tolerance,
+ no_vanishing_districts,
+ single_flip_contiguous,
+)
+from gerrychain.graph import Graph
from gerrychain.partition import Partition
from gerrychain.partition.partition import get_assignment
-from gerrychain.graph import Graph
@pytest.fixture
@@ -50,15 +53,16 @@ def discontiguous_partition(discontiguous_partition_with_flips):
def test_contiguous_with_contiguity_no_flips_is_true(contiguous_partition):
assert contiguous(contiguous_partition)
assert single_flip_contiguous(contiguous_partition)
- assert contiguous_bfs(contiguous_partition)
+ assert contiguous(contiguous_partition)
def test_contiguous_with_contiguity_flips_is_true(contiguous_partition_with_flips):
contiguous_partition, test_flips = contiguous_partition_with_flips
+ # frm: TODO: Testing: Figure out whether test_flips are in original node_ids or internal RX node_ids
contiguous_partition2 = contiguous_partition.flip(test_flips)
assert contiguous(contiguous_partition2)
assert single_flip_contiguous(contiguous_partition2)
- assert contiguous_bfs(contiguous_partition2)
+ assert contiguous(contiguous_partition2)
def test_discontiguous_with_contiguous_no_flips_is_false(discontiguous_partition):
@@ -66,41 +70,30 @@ def test_discontiguous_with_contiguous_no_flips_is_false(discontiguous_partition
def test_discontiguous_with_single_flip_contiguous_no_flips_is_false(
- discontiguous_partition
+ discontiguous_partition,
):
assert not single_flip_contiguous(discontiguous_partition)
-def test_discontiguous_with_contiguous_bfs_no_flips_is_false(discontiguous_partition):
- assert not contiguous_bfs(discontiguous_partition)
-
-
-def test_discontiguous_with_contiguous_flips_is_false(
- discontiguous_partition_with_flips
-):
- part, test_flips = discontiguous_partition_with_flips
- discontiguous_partition2 = part.flip(test_flips)
- assert not contiguous(discontiguous_partition2)
-
-
@pytest.mark.xfail(
- reason="single_flip_contiguous does not work"
- "when the previous partition is discontiguous"
+ reason="single_flip_contiguous does not work" "when the previous partition is discontiguous"
)
def test_discontiguous_with_single_flip_contiguous_flips_is_false(
- discontiguous_partition_with_flips
+ discontiguous_partition_with_flips,
):
part, test_flips = discontiguous_partition_with_flips
+ # frm: TODO: Testing: Figure out whether test_flips are in original node_ids or internal RX node_ids
discontiguous_partition2 = part.flip(test_flips)
assert not single_flip_contiguous(discontiguous_partition2)
-def test_discontiguous_with_contiguous_bfs_flips_is_false(
- discontiguous_partition_with_flips
+def test_discontiguous_with_contiguous_flips_is_false(
+ discontiguous_partition_with_flips,
):
part, test_flips = discontiguous_partition_with_flips
+ # frm: TODO: Testing: Figure out whether test_flips are in original node_ids or internal RX node_ids
discontiguous_partition2 = part.flip(test_flips)
- assert not contiguous_bfs(discontiguous_partition2)
+ assert not contiguous(discontiguous_partition2)
def test_districts_within_tolerance_returns_false_if_districts_are_not_within_tolerance():
@@ -175,7 +168,7 @@ def test_no_vanishing_districts_works():
assert not no_vanishing_districts(partition)
+
def test_deviation_from_ideal():
mock_partition = {"population": {0: 99.0, 1: 101.0}}
- assert deviation_from_ideal(mock_partition, "population") == \
- {0: -0.01, 1: 0.01}
\ No newline at end of file
+ assert deviation_from_ideal(mock_partition, "population") == {0: -0.01, 1: 0.01}
diff --git a/tests/frm_tests/README.txt b/tests/frm_tests/README.txt
new file mode 100644
index 00000000..037dbec1
--- /dev/null
+++ b/tests/frm_tests/README.txt
@@ -0,0 +1,6 @@
+This directory contains tests added by Fred Mueller
+for the work he is doing / did to convert GerryChain
+from using NetworkX to using RustworkX.
+
+Eventually if his code becomes the new thing, these
+tests should be rolled into the normal tests directory.
diff --git a/tests/frm_tests/__init__.py b/tests/frm_tests/__init__.py
new file mode 100644
index 00000000..b3885632
--- /dev/null
+++ b/tests/frm_tests/__init__.py
@@ -0,0 +1 @@
+print("__init__.py invoked")
diff --git a/tests/frm_tests/frm_regression_test.README.txt b/tests/frm_tests/frm_regression_test.README.txt
new file mode 100644
index 00000000..a1051154
--- /dev/null
+++ b/tests/frm_tests/frm_regression_test.README.txt
@@ -0,0 +1,12 @@
+I created a regression test based on the User Guide code so that
+I could make changes and quickly test whether they affected
+user code.
+
+The 3 files that I added are:
+
+ * frm_regression_test.py
+ * Code copied from the User Guide
+ * gerrymandria.json
+ * JSON for the graph used in the regression test
+ * frm_regression_test.README.txt
+ * This file
diff --git a/tests/frm_tests/gerrymandria.json b/tests/frm_tests/gerrymandria.json
new file mode 100644
index 00000000..a6ca2fae
--- /dev/null
+++ b/tests/frm_tests/gerrymandria.json
@@ -0,0 +1,1641 @@
+{
+ "directed": false,
+ "multigraph": false,
+ "graph": [],
+ "nodes": [
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 0,
+ "county": "1",
+ "district": "1",
+ "precinct": 0,
+ "muni": "1",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 0
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 1,
+ "county": "1",
+ "district": "1",
+ "precinct": 1,
+ "muni": "1",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 1
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 2,
+ "county": "1",
+ "district": "1",
+ "precinct": 2,
+ "muni": "5",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 2
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 3,
+ "county": "1",
+ "district": "1",
+ "precinct": 3,
+ "muni": "5",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 3
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 4,
+ "county": "3",
+ "district": "1",
+ "precinct": 4,
+ "muni": "9",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 4
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 5,
+ "county": "3",
+ "district": "1",
+ "precinct": 5,
+ "muni": "9",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 5
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 6,
+ "county": "3",
+ "district": "1",
+ "precinct": 6,
+ "muni": "13",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 6
+ },
+ {
+ "TOTPOP": 1,
+ "x": 0,
+ "y": 7,
+ "county": "3",
+ "district": "1",
+ "precinct": 7,
+ "muni": "13",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 7
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 0,
+ "county": "1",
+ "district": "2",
+ "precinct": 8,
+ "muni": "1",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "2",
+ "id": 8
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 1,
+ "county": "1",
+ "district": "2",
+ "precinct": 9,
+ "muni": "1",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 9
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 2,
+ "county": "1",
+ "district": "2",
+ "precinct": 10,
+ "muni": "5",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 10
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 3,
+ "county": "1",
+ "district": "2",
+ "precinct": 11,
+ "muni": "5",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 11
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 4,
+ "county": "3",
+ "district": "2",
+ "precinct": 12,
+ "muni": "9",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 12
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 5,
+ "county": "3",
+ "district": "2",
+ "precinct": 13,
+ "muni": "9",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 13
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 6,
+ "county": "3",
+ "district": "2",
+ "precinct": 14,
+ "muni": "13",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 14
+ },
+ {
+ "TOTPOP": 1,
+ "x": 1,
+ "y": 7,
+ "county": "3",
+ "district": "2",
+ "precinct": 15,
+ "muni": "13",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "4",
+ "id": 15
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 0,
+ "county": "1",
+ "district": "3",
+ "precinct": 16,
+ "muni": "2",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "4",
+ "id": 16
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 1,
+ "county": "1",
+ "district": "3",
+ "precinct": 17,
+ "muni": "2",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 17
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 2,
+ "county": "1",
+ "district": "3",
+ "precinct": 18,
+ "muni": "6",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 18
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 3,
+ "county": "1",
+ "district": "3",
+ "precinct": 19,
+ "muni": "6",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "2",
+ "id": 19
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 4,
+ "county": "3",
+ "district": "3",
+ "precinct": 20,
+ "muni": "10",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 20
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 5,
+ "county": "3",
+ "district": "3",
+ "precinct": 21,
+ "muni": "10",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 21
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 6,
+ "county": "3",
+ "district": "3",
+ "precinct": 22,
+ "muni": "14",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 22
+ },
+ {
+ "TOTPOP": 1,
+ "x": 2,
+ "y": 7,
+ "county": "3",
+ "district": "3",
+ "precinct": 23,
+ "muni": "14",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "4",
+ "id": 23
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 0,
+ "county": "1",
+ "district": "4",
+ "precinct": 24,
+ "muni": "2",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "4",
+ "id": 24
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 1,
+ "county": "1",
+ "district": "4",
+ "precinct": 25,
+ "muni": "2",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 25
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 2,
+ "county": "1",
+ "district": "4",
+ "precinct": 26,
+ "muni": "6",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 26
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 3,
+ "county": "1",
+ "district": "4",
+ "precinct": 27,
+ "muni": "6",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 27
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 4,
+ "county": "3",
+ "district": "4",
+ "precinct": 28,
+ "muni": "10",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 28
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 5,
+ "county": "3",
+ "district": "4",
+ "precinct": 29,
+ "muni": "10",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 29
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 6,
+ "county": "3",
+ "district": "4",
+ "precinct": 30,
+ "muni": "14",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 30
+ },
+ {
+ "TOTPOP": 1,
+ "x": 3,
+ "y": 7,
+ "county": "3",
+ "district": "4",
+ "precinct": 31,
+ "muni": "14",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 31
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 0,
+ "county": "2",
+ "district": "5",
+ "precinct": 32,
+ "muni": "3",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 32
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 1,
+ "county": "2",
+ "district": "5",
+ "precinct": 33,
+ "muni": "3",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 33
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 2,
+ "county": "2",
+ "district": "5",
+ "precinct": 34,
+ "muni": "7",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 34
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 3,
+ "county": "2",
+ "district": "5",
+ "precinct": 35,
+ "muni": "7",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 35
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 4,
+ "county": "4",
+ "district": "5",
+ "precinct": 36,
+ "muni": "11",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 36
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 5,
+ "county": "4",
+ "district": "5",
+ "precinct": 37,
+ "muni": "11",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 37
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 6,
+ "county": "4",
+ "district": "5",
+ "precinct": 38,
+ "muni": "15",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 38
+ },
+ {
+ "TOTPOP": 1,
+ "x": 4,
+ "y": 7,
+ "county": "4",
+ "district": "5",
+ "precinct": 39,
+ "muni": "15",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 39
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 0,
+ "county": "2",
+ "district": "6",
+ "precinct": 40,
+ "muni": "3",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 40
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 1,
+ "county": "2",
+ "district": "6",
+ "precinct": 41,
+ "muni": "3",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 41
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 2,
+ "county": "2",
+ "district": "6",
+ "precinct": 42,
+ "muni": "7",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 42
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 3,
+ "county": "2",
+ "district": "6",
+ "precinct": 43,
+ "muni": "7",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "4",
+ "id": 43
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 4,
+ "county": "4",
+ "district": "6",
+ "precinct": 44,
+ "muni": "11",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 44
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 5,
+ "county": "4",
+ "district": "6",
+ "precinct": 45,
+ "muni": "11",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 45
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 6,
+ "county": "4",
+ "district": "6",
+ "precinct": 46,
+ "muni": "15",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 46
+ },
+ {
+ "TOTPOP": 1,
+ "x": 5,
+ "y": 7,
+ "county": "4",
+ "district": "6",
+ "precinct": 47,
+ "muni": "15",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 47
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 0,
+ "county": "2",
+ "district": "7",
+ "precinct": 48,
+ "muni": "4",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 48
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 1,
+ "county": "2",
+ "district": "7",
+ "precinct": 49,
+ "muni": "4",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 49
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 2,
+ "county": "2",
+ "district": "7",
+ "precinct": 50,
+ "muni": "8",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 50
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 3,
+ "county": "2",
+ "district": "7",
+ "precinct": 51,
+ "muni": "8",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 51
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 4,
+ "county": "4",
+ "district": "7",
+ "precinct": 52,
+ "muni": "12",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "3",
+ "id": 52
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 5,
+ "county": "4",
+ "district": "7",
+ "precinct": 53,
+ "muni": "12",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 53
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 6,
+ "county": "4",
+ "district": "7",
+ "precinct": 54,
+ "muni": "16",
+ "boundary_node": false,
+ "boundary_perim": 0,
+ "water_dist": "1",
+ "id": 54
+ },
+ {
+ "TOTPOP": 1,
+ "x": 6,
+ "y": 7,
+ "county": "4",
+ "district": "7",
+ "precinct": 55,
+ "muni": "16",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 55
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 0,
+ "county": "2",
+ "district": "8",
+ "precinct": 56,
+ "muni": "4",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 56
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 1,
+ "county": "2",
+ "district": "8",
+ "precinct": 57,
+ "muni": "4",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 57
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 2,
+ "county": "2",
+ "district": "8",
+ "precinct": 58,
+ "muni": "8",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 58
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 3,
+ "county": "2",
+ "district": "8",
+ "precinct": 59,
+ "muni": "8",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 59
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 4,
+ "county": "4",
+ "district": "8",
+ "precinct": 60,
+ "muni": "12",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "3",
+ "id": 60
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 5,
+ "county": "4",
+ "district": "8",
+ "precinct": 61,
+ "muni": "12",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 61
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 6,
+ "county": "4",
+ "district": "8",
+ "precinct": 62,
+ "muni": "16",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 62
+ },
+ {
+ "TOTPOP": 1,
+ "x": 7,
+ "y": 7,
+ "county": "4",
+ "district": "8",
+ "precinct": 63,
+ "muni": "16",
+ "boundary_node": true,
+ "boundary_perim": 1,
+ "water_dist": "1",
+ "id": 63
+ }
+ ],
+ "adjacency": [
+ [
+ {
+ "id": 8
+ },
+ {
+ "id": 1
+ }
+ ],
+ [
+ {
+ "id": 0
+ },
+ {
+ "id": 9
+ },
+ {
+ "id": 2
+ }
+ ],
+ [
+ {
+ "id": 1
+ },
+ {
+ "id": 10
+ },
+ {
+ "id": 3
+ }
+ ],
+ [
+ {
+ "id": 2
+ },
+ {
+ "id": 11
+ },
+ {
+ "id": 4
+ }
+ ],
+ [
+ {
+ "id": 3
+ },
+ {
+ "id": 12
+ },
+ {
+ "id": 5
+ }
+ ],
+ [
+ {
+ "id": 4
+ },
+ {
+ "id": 13
+ },
+ {
+ "id": 6
+ }
+ ],
+ [
+ {
+ "id": 5
+ },
+ {
+ "id": 14
+ },
+ {
+ "id": 7
+ }
+ ],
+ [
+ {
+ "id": 6
+ },
+ {
+ "id": 15
+ }
+ ],
+ [
+ {
+ "id": 0
+ },
+ {
+ "id": 16
+ },
+ {
+ "id": 9
+ }
+ ],
+ [
+ {
+ "id": 1
+ },
+ {
+ "id": 8
+ },
+ {
+ "id": 17
+ },
+ {
+ "id": 10
+ }
+ ],
+ [
+ {
+ "id": 2
+ },
+ {
+ "id": 9
+ },
+ {
+ "id": 18
+ },
+ {
+ "id": 11
+ }
+ ],
+ [
+ {
+ "id": 3
+ },
+ {
+ "id": 10
+ },
+ {
+ "id": 19
+ },
+ {
+ "id": 12
+ }
+ ],
+ [
+ {
+ "id": 4
+ },
+ {
+ "id": 11
+ },
+ {
+ "id": 20
+ },
+ {
+ "id": 13
+ }
+ ],
+ [
+ {
+ "id": 5
+ },
+ {
+ "id": 12
+ },
+ {
+ "id": 21
+ },
+ {
+ "id": 14
+ }
+ ],
+ [
+ {
+ "id": 6
+ },
+ {
+ "id": 13
+ },
+ {
+ "id": 22
+ },
+ {
+ "id": 15
+ }
+ ],
+ [
+ {
+ "id": 7
+ },
+ {
+ "id": 14
+ },
+ {
+ "id": 23
+ }
+ ],
+ [
+ {
+ "id": 8
+ },
+ {
+ "id": 24
+ },
+ {
+ "id": 17
+ }
+ ],
+ [
+ {
+ "id": 9
+ },
+ {
+ "id": 16
+ },
+ {
+ "id": 25
+ },
+ {
+ "id": 18
+ }
+ ],
+ [
+ {
+ "id": 10
+ },
+ {
+ "id": 17
+ },
+ {
+ "id": 26
+ },
+ {
+ "id": 19
+ }
+ ],
+ [
+ {
+ "id": 11
+ },
+ {
+ "id": 18
+ },
+ {
+ "id": 27
+ },
+ {
+ "id": 20
+ }
+ ],
+ [
+ {
+ "id": 12
+ },
+ {
+ "id": 19
+ },
+ {
+ "id": 28
+ },
+ {
+ "id": 21
+ }
+ ],
+ [
+ {
+ "id": 13
+ },
+ {
+ "id": 20
+ },
+ {
+ "id": 29
+ },
+ {
+ "id": 22
+ }
+ ],
+ [
+ {
+ "id": 14
+ },
+ {
+ "id": 21
+ },
+ {
+ "id": 30
+ },
+ {
+ "id": 23
+ }
+ ],
+ [
+ {
+ "id": 15
+ },
+ {
+ "id": 22
+ },
+ {
+ "id": 31
+ }
+ ],
+ [
+ {
+ "id": 16
+ },
+ {
+ "id": 32
+ },
+ {
+ "id": 25
+ }
+ ],
+ [
+ {
+ "id": 17
+ },
+ {
+ "id": 24
+ },
+ {
+ "id": 33
+ },
+ {
+ "id": 26
+ }
+ ],
+ [
+ {
+ "id": 18
+ },
+ {
+ "id": 25
+ },
+ {
+ "id": 34
+ },
+ {
+ "id": 27
+ }
+ ],
+ [
+ {
+ "id": 19
+ },
+ {
+ "id": 26
+ },
+ {
+ "id": 35
+ },
+ {
+ "id": 28
+ }
+ ],
+ [
+ {
+ "id": 20
+ },
+ {
+ "id": 27
+ },
+ {
+ "id": 36
+ },
+ {
+ "id": 29
+ }
+ ],
+ [
+ {
+ "id": 21
+ },
+ {
+ "id": 28
+ },
+ {
+ "id": 37
+ },
+ {
+ "id": 30
+ }
+ ],
+ [
+ {
+ "id": 22
+ },
+ {
+ "id": 29
+ },
+ {
+ "id": 38
+ },
+ {
+ "id": 31
+ }
+ ],
+ [
+ {
+ "id": 23
+ },
+ {
+ "id": 30
+ },
+ {
+ "id": 39
+ }
+ ],
+ [
+ {
+ "id": 24
+ },
+ {
+ "id": 40
+ },
+ {
+ "id": 33
+ }
+ ],
+ [
+ {
+ "id": 25
+ },
+ {
+ "id": 32
+ },
+ {
+ "id": 41
+ },
+ {
+ "id": 34
+ }
+ ],
+ [
+ {
+ "id": 26
+ },
+ {
+ "id": 33
+ },
+ {
+ "id": 42
+ },
+ {
+ "id": 35
+ }
+ ],
+ [
+ {
+ "id": 27
+ },
+ {
+ "id": 34
+ },
+ {
+ "id": 43
+ },
+ {
+ "id": 36
+ }
+ ],
+ [
+ {
+ "id": 28
+ },
+ {
+ "id": 35
+ },
+ {
+ "id": 44
+ },
+ {
+ "id": 37
+ }
+ ],
+ [
+ {
+ "id": 29
+ },
+ {
+ "id": 36
+ },
+ {
+ "id": 45
+ },
+ {
+ "id": 38
+ }
+ ],
+ [
+ {
+ "id": 30
+ },
+ {
+ "id": 37
+ },
+ {
+ "id": 46
+ },
+ {
+ "id": 39
+ }
+ ],
+ [
+ {
+ "id": 31
+ },
+ {
+ "id": 38
+ },
+ {
+ "id": 47
+ }
+ ],
+ [
+ {
+ "id": 32
+ },
+ {
+ "id": 48
+ },
+ {
+ "id": 41
+ }
+ ],
+ [
+ {
+ "id": 33
+ },
+ {
+ "id": 40
+ },
+ {
+ "id": 49
+ },
+ {
+ "id": 42
+ }
+ ],
+ [
+ {
+ "id": 34
+ },
+ {
+ "id": 41
+ },
+ {
+ "id": 50
+ },
+ {
+ "id": 43
+ }
+ ],
+ [
+ {
+ "id": 35
+ },
+ {
+ "id": 42
+ },
+ {
+ "id": 51
+ },
+ {
+ "id": 44
+ }
+ ],
+ [
+ {
+ "id": 36
+ },
+ {
+ "id": 43
+ },
+ {
+ "id": 52
+ },
+ {
+ "id": 45
+ }
+ ],
+ [
+ {
+ "id": 37
+ },
+ {
+ "id": 44
+ },
+ {
+ "id": 53
+ },
+ {
+ "id": 46
+ }
+ ],
+ [
+ {
+ "id": 38
+ },
+ {
+ "id": 45
+ },
+ {
+ "id": 54
+ },
+ {
+ "id": 47
+ }
+ ],
+ [
+ {
+ "id": 39
+ },
+ {
+ "id": 46
+ },
+ {
+ "id": 55
+ }
+ ],
+ [
+ {
+ "id": 40
+ },
+ {
+ "id": 56
+ },
+ {
+ "id": 49
+ }
+ ],
+ [
+ {
+ "id": 41
+ },
+ {
+ "id": 48
+ },
+ {
+ "id": 57
+ },
+ {
+ "id": 50
+ }
+ ],
+ [
+ {
+ "id": 42
+ },
+ {
+ "id": 49
+ },
+ {
+ "id": 58
+ },
+ {
+ "id": 51
+ }
+ ],
+ [
+ {
+ "id": 43
+ },
+ {
+ "id": 50
+ },
+ {
+ "id": 59
+ },
+ {
+ "id": 52
+ }
+ ],
+ [
+ {
+ "id": 44
+ },
+ {
+ "id": 51
+ },
+ {
+ "id": 60
+ },
+ {
+ "id": 53
+ }
+ ],
+ [
+ {
+ "id": 45
+ },
+ {
+ "id": 52
+ },
+ {
+ "id": 61
+ },
+ {
+ "id": 54
+ }
+ ],
+ [
+ {
+ "id": 46
+ },
+ {
+ "id": 53
+ },
+ {
+ "id": 62
+ },
+ {
+ "id": 55
+ }
+ ],
+ [
+ {
+ "id": 47
+ },
+ {
+ "id": 54
+ },
+ {
+ "id": 63
+ }
+ ],
+ [
+ {
+ "id": 48
+ },
+ {
+ "id": 57
+ }
+ ],
+ [
+ {
+ "id": 49
+ },
+ {
+ "id": 56
+ },
+ {
+ "id": 58
+ }
+ ],
+ [
+ {
+ "id": 50
+ },
+ {
+ "id": 57
+ },
+ {
+ "id": 59
+ }
+ ],
+ [
+ {
+ "id": 51
+ },
+ {
+ "id": 58
+ },
+ {
+ "id": 60
+ }
+ ],
+ [
+ {
+ "id": 52
+ },
+ {
+ "id": 59
+ },
+ {
+ "id": 61
+ }
+ ],
+ [
+ {
+ "id": 53
+ },
+ {
+ "id": 60
+ },
+ {
+ "id": 62
+ }
+ ],
+ [
+ {
+ "id": 54
+ },
+ {
+ "id": 61
+ },
+ {
+ "id": 63
+ }
+ ],
+ [
+ {
+ "id": 55
+ },
+ {
+ "id": 62
+ }
+ ]
+ ]
+}
\ No newline at end of file
diff --git a/tests/frm_tests/test_frm_make_graph.py b/tests/frm_tests/test_frm_make_graph.py
new file mode 100644
index 00000000..4220494e
--- /dev/null
+++ b/tests/frm_tests/test_frm_make_graph.py
@@ -0,0 +1,286 @@
+################################################################
+#
+# frm: This file was copied from test_make_graph.py (to make
+# use of its fixtures). It should eventually evolve into
+# a reasonable test of the additional functions added by me
+# to gerrychain.graph.
+#
+################################################################
+
+import pathlib
+from tempfile import TemporaryDirectory
+from unittest.mock import patch
+
+import geopandas as gp
+import networkx
+import pandas
+import pytest
+from pyproj import CRS
+from shapely.geometry import Polygon
+
+from gerrychain.graph import Graph
+from gerrychain.graph.geo import GeometryError
+
+
+@pytest.fixture
+def geodataframe():
+ a = Polygon([(0, 0), (0, 1), (1, 1), (1, 0)])
+ b = Polygon([(0, 1), (0, 2), (1, 2), (1, 1)])
+ c = Polygon([(1, 0), (1, 1), (2, 1), (2, 0)])
+ d = Polygon([(1, 1), (1, 2), (2, 2), (2, 1)])
+ df = gp.GeoDataFrame({"ID": ["a", "b", "c", "d"], "geometry": [a, b, c, d]})
+ df.crs = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
+ return df
+
+
+@pytest.fixture
+def gdf_with_data(geodataframe):
+ geodataframe["data"] = list(range(len(geodataframe)))
+ geodataframe["data2"] = list(range(len(geodataframe)))
+ return geodataframe
+
+
+@pytest.fixture
+def geodataframe_with_boundary():
+ """
+ abe
+ ade
+ ace
+ """
+ a = Polygon([(0, 0), (0, 1), (0, 2), (0, 3), (1, 3), (1, 2), (1, 1), (1, 0)])
+ b = Polygon([(1, 2), (1, 3), (2, 3), (2, 2)])
+ c = Polygon([(1, 0), (1, 1), (2, 1), (2, 0)])
+ d = Polygon([(1, 1), (1, 2), (2, 2), (2, 1)])
+ e = Polygon([(2, 0), (2, 1), (2, 2), (2, 3), (3, 3), (3, 2), (3, 1), (3, 0)])
+ df = gp.GeoDataFrame({"ID": ["a", "b", "c", "d", "e"], "geometry": [a, b, c, d, e]})
+ df.crs = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs"
+ return df
+
+
+@pytest.fixture
+def shapefile(gdf_with_data):
+ with TemporaryDirectory() as d:
+ filepath = pathlib.Path(d) / "temp.shp"
+ filename = str(filepath.absolute())
+ gdf_with_data.to_file(filename)
+ yield filename
+
+
+@pytest.fixture
+def target_file():
+ with TemporaryDirectory() as d:
+ filepath = pathlib.Path(d) / "temp.shp"
+ filename = str(filepath.absolute())
+ yield filename
+
+
+def test_add_data_to_graph_can_handle_column_names_that_start_with_numbers():
+ nx_graph = networkx.Graph([("01", "02"), ("02", "03"), ("03", "01")])
+ df = pandas.DataFrame({"16SenDVote": [20, 30, 50], "node": ["01", "02", "03"]})
+ df = df.set_index("node")
+
+ graph = Graph.from_networkx(nx_graph)
+ graph.add_data(df, ["16SenDVote"])
+
+ assert nx_graph.nodes["01"]["16SenDVote"] == 20
+ assert nx_graph.nodes["02"]["16SenDVote"] == 30
+ assert nx_graph.nodes["03"]["16SenDVote"] == 50
+
+ assert graph.node_data("01")["16SenDVote"] == 20
+ assert graph.node_data("02")["16SenDVote"] == 30
+ assert graph.node_data("03")["16SenDVote"] == 50
+
+
+def test_join_can_handle_right_index():
+ nx_graph = networkx.Graph([("01", "02"), ("02", "03"), ("03", "01")])
+ df = pandas.DataFrame({"16SenDVote": [20, 30, 50], "node": ["01", "02", "03"]})
+
+ graph = Graph.from_networkx(nx_graph)
+
+ graph.join(df, ["16SenDVote"], right_index="node")
+
+ assert graph.node_data("01")["16SenDVote"] == 20
+ assert graph.node_data("02")["16SenDVote"] == 30
+ assert graph.node_data("03")["16SenDVote"] == 50
+
+
+def test_make_graph_from_dataframe_creates_graph(geodataframe):
+ graph = Graph.from_geodataframe(geodataframe)
+ assert isinstance(graph, Graph)
+
+
+def test_make_graph_from_dataframe_preserves_df_index(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df)
+ assert set(graph.nodes) == {"a", "b", "c", "d"}
+
+
+def test_make_graph_from_dataframe_gives_correct_graph(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df)
+
+ assert edge_set_equal(set(graph.edges), {("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")})
+
+
+def test_make_graph_works_with_queen_adjacency(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df, adjacency="queen")
+
+ assert edge_set_equal(
+ set(graph.edges),
+ {("a", "b"), ("a", "c"), ("b", "d"), ("c", "d"), ("a", "d"), ("b", "c")},
+ )
+
+
+def test_can_pass_queen_or_rook_strings_to_control_adjacency(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df, adjacency="queen")
+
+ assert edge_set_equal(
+ set(graph.edges),
+ {("a", "b"), ("a", "c"), ("b", "d"), ("c", "d"), ("a", "d"), ("b", "c")},
+ )
+
+
+def test_can_insist_on_not_reprojecting(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df, reproject=False)
+
+ for node in ("a", "b", "c", "d"):
+ assert graph.node_data(node)["area"] == 1
+
+ for edge in graph.edges:
+ assert graph.edge_data(edge)["shared_perim"] == 1
+
+
+def test_does_not_reproject_by_default(geodataframe):
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df)
+
+ for node in ("a", "b", "c", "d"):
+ assert graph.node_data(node)["area"] == 1.0
+
+ for edge in graph.edges:
+ assert graph.edge_data(edge)["shared_perim"] == 1.0
+
+
+def test_reproject(geodataframe):
+ # I don't know what the areas and perimeters are in UTM for these made-up polygons,
+ # but I'm pretty sure they're not 1.
+ df = geodataframe.set_index("ID")
+ graph = Graph.from_geodataframe(df, reproject=True)
+
+ for node in ("a", "b", "c", "d"):
+ assert graph.node_data(node)["area"] != 1
+
+ for edge in graph.edges:
+ assert graph.edge_data(edge)["shared_perim"] != 1
+
+
+def test_identifies_boundary_nodes(geodataframe_with_boundary):
+ df = geodataframe_with_boundary.set_index("ID")
+ graph = Graph.from_geodataframe(df)
+
+ for node in ("a", "b", "c", "e"):
+ assert graph.node_data(node)["boundary_node"]
+ assert not graph.node_data("d")["boundary_node"]
+
+
+def test_computes_boundary_perims(geodataframe_with_boundary):
+ df = geodataframe_with_boundary.set_index("ID")
+ graph = Graph.from_geodataframe(df, reproject=False)
+
+ expected = {"a": 5, "e": 5, "b": 1, "c": 1}
+
+ for node, value in expected.items():
+ assert graph.node_data(node)["boundary_perim"] == value
+
+
+def edge_set_equal(set1, set2):
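+    # Compare edge sets while ignoring orientation: (u, v) and (v, u) are
+    # the same undirected edge, so symmetrize both sets before comparing.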
+ return {(y, x) for x, y in set1} | set1 == {(y, x) for x, y in set2} | set2
+
+
+def test_from_file_adds_all_data_by_default(shapefile):
+ graph = Graph.from_file(shapefile)
+
+ # data dictionaries for all of the nodes
+ all_node_data = [graph.node_data(node_id) for node_id in graph.node_indices]
+
+ assert all("data" in node_data for node_data in all_node_data)
+ assert all("data2" in node_data for node_data in all_node_data)
+
+
+def test_from_file_and_then_to_json_does_not_error(shapefile, target_file):
+ graph = Graph.from_file(shapefile)
+
+ # Even the geometry column is copied to the graph
+
+ # data dictionaries for all of the nodes
+ all_node_data = [graph.node_data(node_id) for node_id in graph.node_indices]
+
+ assert all("geometry" in node_data for node_data in all_node_data)
+
+ graph.to_json(target_file)
+
+
+def test_from_file_and_then_to_json_with_geometries(shapefile, target_file):
+ graph = Graph.from_file(shapefile)
+
+ # data dictionaries for all of the nodes
+ all_node_data = [graph.node_data(node_id) for node_id in graph.node_indices]
+
+ # Even the geometry column is copied to the graph
+ assert all("geometry" in node_data for node_data in all_node_data)
+
+ # frm: ??? Does anything check that the file is actually written?
+ graph.to_json(target_file, include_geometries_as_geojson=True)
+
+
+def test_graph_warns_for_islands():
+ nx_graph = networkx.Graph()
+ nx_graph.add_node(0)
+ graph = Graph.from_networkx(nx_graph)
+
+ with pytest.warns(Warning):
+ graph.warn_for_islands()
+
+
+def test_graph_raises_if_crs_is_missing_when_reprojecting(geodataframe):
+ geodataframe.crs = None
+
+ with pytest.raises(ValueError):
+ Graph.from_geodataframe(geodataframe, reproject=True)
+
+
+def test_raises_geometry_error_if_invalid_geometry(shapefile):
+ with patch("gerrychain.graph.geo.explain_validity") as explain:
+ explain.return_value = "Invalid geometry"
+ with pytest.raises(GeometryError):
+ Graph.from_file(shapefile, ignore_errors=False)
+
+
+def test_can_ignore_errors_while_making_graph(shapefile):
+ with patch("gerrychain.graph.geo.explain_validity") as explain:
+ explain.return_value = "Invalid geometry"
+ assert Graph.from_file(shapefile, ignore_errors=True)
+
+
+def test_data_and_geometry(gdf_with_data):
+ df = gdf_with_data
+ graph = Graph.from_geodataframe(df, cols_to_add=["data", "data2"])
+ assert graph.geometry is df.geometry
+ # graph.add_data(df[["data"]])
+ assert (graph.data["data"] == df["data"]).all()
+ # graph.add_data(df[["data2"]])
+ assert list(graph.data.columns) == ["data", "data2"]
+
+
+def test_make_graph_from_dataframe_has_crs(gdf_with_data):
+ graph = Graph.from_geodataframe(gdf_with_data)
+ assert CRS.from_json(graph.graph["crs"]).equals(gdf_with_data.crs)
+
+
+def test_make_graph_from_shapefile_has_crs(shapefile):
+ graph = Graph.from_file(shapefile)
+ df = gp.read_file(shapefile)
+ assert CRS.from_json(graph.graph["crs"]).equals(df.crs)
diff --git a/tests/frm_tests/test_frm_nx_rx_graph.py b/tests/frm_tests/test_frm_nx_rx_graph.py
new file mode 100644
index 00000000..170fc56f
--- /dev/null
+++ b/tests/frm_tests/test_frm_nx_rx_graph.py
@@ -0,0 +1,242 @@
+#######################################################
+# Overview of test_frm_nx_rx_graph.py
+#######################################################
+"""
+
+A collection of tests to verify that the new GerryChain
+Graph object works the same with NetworkX and RustworkX.
+
+
+"""
+
+import os
+
+# Set the random seed so that the results are reproducible!
+import random
+
+import pytest
+import rustworkx as rx
+
+from gerrychain import Graph
+
+random.seed(2024)
+
+############################################################
+# Create Graph Objects - both direct NX.Graph and RX.PyGraph
+# objects and two GerryChain Graph objects that embed the
+# NX and RX graphs.
+############################################################
+
+
+@pytest.fixture(scope="module")
+def json_file_path():
+ # Get path to the JSON containing graph data
+ test_file_path = os.path.abspath(__file__)
+ cur_directory = os.path.dirname(test_file_path)
+ path_for_json_file = os.path.join(cur_directory, "gerrymandria.json")
+ # print("json file is: ", json_file_path)
+ return path_for_json_file
+
+
+@pytest.fixture(scope="module")
+def gerrychain_nx_graph(json_file_path):
+ # Create an NX based Graph object from the JSON
+ graph = Graph.from_json(json_file_path)
+ print("gerrychain_nx_graph: len(graph): ", len(graph))
+ return graph
+
+
+@pytest.fixture(scope="module")
+def nx_graph(gerrychain_nx_graph):
+ # Fetch the NX graph object from inside the Graph object
+ return gerrychain_nx_graph.get_nx_graph()
+
+
+@pytest.fixture(scope="module")
+def rx_graph(nx_graph):
+ # Create an RX graph object from NX, preserving node data
+ return rx.networkx_converter(nx_graph, keep_attributes=True)
+
+
+@pytest.fixture(scope="module")
+def gerrychain_rx_graph(rx_graph):
+ # Create a Graph object with an RX graph inside
+ return Graph.from_rustworkx(rx_graph)
+
+
+##################
+# Start of Tests
+##################
+
+
+def test_sanity():
+ # frm: if you call pytest with -rP, then it will show stdout for tests
+ print("test_sanity(): called")
+ assert True
+
+
+def test_nx_rx_sets_of_nodes_agree(nx_graph, rx_graph):
+ nx_set_of_nodes = set(nx_graph.nodes())
+ rx_set_of_nodes = set(rx_graph.node_indices())
+ assert nx_set_of_nodes == rx_set_of_nodes
+
+
+def test_nx_rx_node_data_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+ nx_data_dict = gerrychain_nx_graph.node_data(1)
+ rx_data_dict = gerrychain_rx_graph.node_data(1)
+ assert nx_data_dict == rx_data_dict
+
+
+def test_nx_rx_node_indices_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+ nx_node_indices = gerrychain_nx_graph.node_indices
+ rx_node_indices = gerrychain_rx_graph.node_indices
+ assert nx_node_indices == rx_node_indices
+
+
+def test_nx_rx_edges_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+    # TODO: Testing: Rethink this test. At the moment it relies on the edge_list()
+    #       call, which does not exist on the GerryChain Graph object and is
+    #       instead forwarded to RX through clever __getattr__ handling.
+    #       I think we should add an edge_list() method to GerryChain Graph.
+ nx_edges = set(gerrychain_nx_graph.edges)
+ rx_edges = set(gerrychain_rx_graph.edge_list())
+ assert nx_edges == rx_edges
+
+
+def test_nx_rx_node_neighbors_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+ for i in gerrychain_nx_graph:
+ # Need to convert to set, because ordering of neighbor nodes differs in the lists
+ nx_neighbors = set(gerrychain_nx_graph.neighbors(i))
+ rx_neighbors = set(gerrychain_rx_graph.neighbors(i))
+ assert nx_neighbors == rx_neighbors
+
+
+def test_nx_rx_subgraphs_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+ subgraph_nodes = [
+ 0,
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ ] # TODO: Testing: make this a fixture dependent on JSON graph
+ nx_subgraph = gerrychain_nx_graph.subgraph(subgraph_nodes)
+ rx_subgraph = gerrychain_rx_graph.subgraph(subgraph_nodes)
+ for node_id in nx_subgraph:
+ nx_node_data = nx_subgraph.node_data(node_id)
+ rx_node_data = rx_subgraph.node_data(node_id)
+ assert nx_node_data == rx_node_data
+ # frm: TODO: Testing: This does not test that the rx_subgraph has the exact same number of
+ # nodes as the nx_subgraph, and it does not test edge data...
+
+
+def test_nx_rx_degrees_agree(gerrychain_nx_graph, gerrychain_rx_graph):
+ # Verify that the degree of each node agrees between NX and RX versions
+ nx_degrees = {
+ node_id: gerrychain_nx_graph.degree(node_id) for node_id in gerrychain_nx_graph.node_indices
+ }
+ rx_degrees = {
+ node_id: gerrychain_rx_graph.degree(node_id) for node_id in gerrychain_rx_graph.node_indices
+ }
+ for node_id in gerrychain_nx_graph.node_indices:
+ assert nx_degrees[node_id] == rx_degrees[node_id]
+
+
+"""
+frm: TODO: Testing:
+
+ * Functions:
+ * predecessors()
+ * successors()
+ * is_connected()
+ * laplacian_matrix()
+ * normalized_laplacian_matrix()
+ * neighbors()
+ I think this has been done for both NX and RX
+ * networkx.generators.lattice.grid_2d_graph()
+ * nx.to_dict_of_lists()
+ * nx.tree.minimum_spanning_tree()
+ * nx.number_connected_components()
+ * nx.set_edge_attributes()
+ * nx.set_node_attributes()
+
+ * Syntax:
+ * graph.edges
+ NX - note that edges and edges() do exactly the same thing. They return
+ an EdgeView of a list of edges with edge_id being a tuple indicating
+ the start and end node_ids for the edge.
+ Need to find out how edges and edges() is used in the code to know
+ what the right thing to do is for RX - that is, what aspect of an
+ EdgeView is used in the code? Is a set of tuples OK?
+ * graph.nodes
+ NX returns a NodeView with the node_ids for the nodes
+ RX does not have a "nodes" attribute, but it does have a nodes()
+ method which does something different. It returns a list (indexed
+ by node_id) of the data associated with nodes.
+ So, I need to see how Graph.nodes is used in the code to see what the
+ right way is to support it in RX.
+ * graph.nodes[node_id]
+ returns data dictionary for the node
+ * graph.nodes[node_id][attr_id]
+ returns the value for the given attribute for that node's data
+ * graph.add_edge()
+ Done differently in NX and RX
+ * graph.degree
+ * graph.subgraph
+ * for edge in graph.edge_indices:
+ graph.edges[edge]["weight"] = random.random()
+ In RX, assigning the weight to an edge is done differently...
+ Note that edge_indices currently works exactly the same for both
+ NX and RX - returning a set of tuples (for edges). However,
+ assigning a value to the "weight" attribute of an edge is done
+      differently... (see the sketch after this docstring)
+ * islands()
+"""
+
+
+### my_updaters = {
+### "population": updaters.Tally("TOTPOP"),
+### "cut_edges": updaters.cut_edges
+### }
+###
+### initial_partition = Partition(
+### nx_graph,
+### assignment="district",
+### updaters=my_updaters
+### )
+###
+### # This should be 8 since each district has 1 person in it.
+### # Note that the key "population" corresponds to the population updater
+### # that we defined above and not with the population column in the json file.
+### ideal_population = sum(initial_partition["population"].values()) / len(initial_partition)
+###
+### proposal = partial(
+### recom,
+### pop_col="TOTPOP",
+### pop_target=ideal_population,
+### epsilon=0.01,
+### node_repeats=2
+### )
+###
+### print("Got proposal")
+###
+### recom_chain = MarkovChain(
+### proposal=proposal,
+### constraints=[contiguous],
+### accept=accept.always_accept,
+### initial_state=initial_partition,
+### total_steps=40
+### )
+###
+### print("Set up Markov Chain")
+###
+### assignment_list = []
+###
+### for i, item in enumerate(recom_chain):
+### print(f"Finished step {i+1}/{len(recom_chain)}")
+### assignment_list.append(item.assignment)
+###
+### print("Enumerated the chain: number of entries in list is: ", len(assignment_list))
+###
+### def test_success():
+### len(assignment_list) == 40
diff --git a/tests/frm_tests/test_frm_regression.py b/tests/frm_tests/test_frm_regression.py
new file mode 100644
index 00000000..e42ea264
--- /dev/null
+++ b/tests/frm_tests/test_frm_regression.py
@@ -0,0 +1,64 @@
+###############################################################
+#
+# frm: Overview of test_frm_regression.py
+#
+# This code was copied from the GerryChain User Guide / Tutorial as a way
+# to have a functional test that exercises the overall logic of GerryChain.
+#
+# It is NOT comprehensive, but it does get all the way to executing
+# a chain.
+#
+# It is a quick and dirty way to make sure I haven't really screwed things up ;-)
+#
+###############################################################
+
+import os
+
+# Set the random seed so that the results are reproducible!
+import random
+from functools import partial
+
+from gerrychain import Graph, MarkovChain, Partition, accept, updaters
+from gerrychain.constraints import contiguous
+from gerrychain.proposals import recom
+
+random.seed(2024)
+
+
+test_file_path = os.path.abspath(__file__)
+cur_directory = os.path.dirname(test_file_path)
+json_file_path = os.path.join(cur_directory, "gerrymandria.json")
+
+graph = Graph.from_json(json_file_path)
+
+my_updaters = {"population": updaters.Tally("TOTPOP"), "cut_edges": updaters.cut_edges}
+
+initial_partition = Partition(graph, assignment="district", updaters=my_updaters)
+
+# This should be 8 since each district has 1 person in it.
+# Note that the key "population" corresponds to the population updater
+# that we defined above and not with the population column in the json file.
+ideal_population = sum(initial_partition["population"].values()) / len(initial_partition)
+
+proposal = partial(
+ recom, pop_col="TOTPOP", pop_target=ideal_population, epsilon=0.01, node_repeats=2
+)
+
+recom_chain = MarkovChain(
+ proposal=proposal,
+ constraints=[contiguous],
+ accept=accept.always_accept,
+ initial_state=initial_partition,
+ total_steps=40,
+)
+
+assignment_list = []
+
+for i, item in enumerate(recom_chain):
+ print(f"Finished step {i+1}/{len(recom_chain)}")
+ assignment_list.append(item.assignment)
+
+print("Enumerated the chain: number of entries in list is: ", len(assignment_list))
+
+
+def test_success():
+    assert len(assignment_list) == 40
diff --git a/tests/frm_tests/test_to_networkx_graph.py b/tests/frm_tests/test_to_networkx_graph.py
new file mode 100644
index 00000000..9ac31ab4
--- /dev/null
+++ b/tests/frm_tests/test_to_networkx_graph.py
@@ -0,0 +1,179 @@
+#
+# This tests whether the routine, to_networkx_graph(), works
+# properly.
+#
+# This routine extracts a new NetworkX.Graph object from a
+# Graph object that is based on RustworkX. When we create
+# a Partition object from a NetworkX Graph, we convert the
+# graph to RustworkX for performance. However, users might
+# want to have access to a NetworkX Graph for a variety of
+# reasons: mostly because they built their initial graph as
+# a NetworkX Graph and they used node_ids that made sense to
+# them at the time and would like to access the graph at
+# the end of a MarkovChain run using those same "original"
+# IDs.
+#
+# The extracted NetworkX Graph should have the "original"
+# node_ids, and it should have all of the node and edge
+# data that was in the RustworkX Graph object.
+#
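+#
+# A minimal round-trip sketch of the behavior described above, assuming the
+# same entry points the full test below uses (Graph.from_networkx, Partition,
+# and to_networkx_graph on the partition's RX-based graph). Illustrative only:
+#
+#     g = Graph.from_networkx(nx.path_graph(["A", "B", "C"]))
+#     partition = Partition(g, {"A": 0, "B": 0, "C": 1})  # converts to RX internally
+#     round_tripped = partition.graph.to_networkx_graph()
+#     assert set(round_tripped.nodes) == {"A", "B", "C"}  # original node_ids survive
+#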
+
+
+import networkx as nx
+
+from gerrychain.graph import Graph
+from gerrychain.partition import Partition
+
+
+def test_to_networkx_graph_works():
+ """
+ Create an NX graph (grid) that looks like this:
+
+ 'A' 'B' 'C'
+ 'D' 'E' 'F'
+ 'G' 'H' 'I'
+ """
+
+ nx_graph = nx.Graph()
+ nx_graph.add_edges_from(
+ [
+ ("A", "B"),
+ ("A", "D"),
+ ("B", "C"),
+ ("B", "E"),
+ ("C", "F"),
+ ("D", "E"),
+ ("D", "G"),
+ ("E", "F"),
+ ("E", "H"),
+ ("F", "I"),
+ ("G", "H"),
+ ("H", "I"),
+ ]
+ )
+
+ # Add some node and edge data to the nx_graph
+
+ graph_node_ids = ["A", "B", "C", "D", "E", "F", "G", "H", "I"]
+ for node_id in graph_node_ids:
+ nx_graph.nodes[node_id]["nx-node-data"] = node_id
+
+ nx_graph.edges[("A", "B")]["nx-edge-data"] = ("A", "B")
+ nx_graph.edges[("A", "D")]["nx-edge-data"] = ("A", "D")
+ nx_graph.edges[("B", "C")]["nx-edge-data"] = ("B", "C")
+ nx_graph.edges[("B", "E")]["nx-edge-data"] = ("B", "E")
+ nx_graph.edges[("C", "F")]["nx-edge-data"] = ("C", "F")
+ nx_graph.edges[("D", "E")]["nx-edge-data"] = ("D", "E")
+ nx_graph.edges[("D", "G")]["nx-edge-data"] = ("D", "G")
+ nx_graph.edges[("E", "F")]["nx-edge-data"] = ("E", "F")
+ nx_graph.edges[("E", "H")]["nx-edge-data"] = ("E", "H")
+ nx_graph.edges[("F", "I")]["nx-edge-data"] = ("F", "I")
+ nx_graph.edges[("G", "H")]["nx-edge-data"] = ("G", "H")
+ nx_graph.edges[("H", "I")]["nx-edge-data"] = ("H", "I")
+
+ graph = Graph.from_networkx(nx_graph)
+
+ """
+ Create a partition assigning each "row" of
+ nodes to a part (district), so the assignment
+ looks like:
+
+ 0 0 0
+ 1 1 1
+ 2 2 2
+ """
+
+ initial_assignment = {
+ "A": 0,
+ "B": 0,
+ "C": 0,
+ "D": 1,
+ "E": 1,
+ "F": 1,
+ "G": 2,
+ "H": 2,
+ "I": 2,
+ }
+
+ # Create a partition
+ partition = Partition(graph, initial_assignment)
+
+ # The partition's graph object has been converted to be based on RX
+ new_graph = partition.graph
+
+ # Add some additional data
+ for node_id in new_graph.node_indices:
+ new_graph.node_data(node_id)["internal-node-data"] = (
+ new_graph.original_nx_node_id_for_internal_node_id(node_id)
+ )
+ for edge_id in new_graph.edge_indices:
+ new_graph.edge_data(edge_id)["internal-edge-data"] = "internal-edge-data"
+
+ # Now create a second partition by flipping the
+ # nodes in the first row to be in part (district) 1
+
+ """
+ The new partition's mapping of nodes to parts should look like this:
+
+ 1 1 1
+ 1 1 1
+ 2 2 2
+ """
+
+ flips = {"A": 1, "B": 1, "C": 1}
+ # Create a new partition based on these flips - using "original" node_ids
+ new_partition = partition.flip(flips, use_original_nx_node_ids=True)
+
+ # Get the NX graph after doing the flips.
+ extracted_nx_graph = new_partition.graph.to_networkx_graph()
+
+ # Get the assignments for both the initial partition and the new_partition
+
+ internal_assignment_0 = partition.assignment
+ internal_assignment_1 = new_partition.assignment
+
+ # convert the internal assignments into "original" node_ids
+ original_assignment_0 = {}
+ for node_id, part in internal_assignment_0.items():
+ original_nx_node_id = partition.graph.original_nx_node_id_for_internal_node_id(node_id)
+ original_assignment_0[original_nx_node_id] = part
+ original_assignment_1 = {}
+ for node_id, part in internal_assignment_1.items():
+ original_nx_node_id = partition.graph.original_nx_node_id_for_internal_node_id(node_id)
+ original_assignment_1[original_nx_node_id] = part
+
+ # Check that all is well...
+
+ # Check that the initial assignment is the same as the internal RX-based assignment
+ for node_id, part in initial_assignment.items():
+ assert part == original_assignment_0[node_id]
+
+ # Check that the flips did what they were supposed to do
+ for node_id in ["A", "B", "C", "D", "E", "F"]:
+ assert original_assignment_1[node_id] == 1
+ for node_id in ["G", "H", "I"]:
+ assert original_assignment_1[node_id] == 2
+
+ # Check that the node and edge data is present
+
+ # Check node data
+ for node_id in extracted_nx_graph.nodes:
+ # Data assigned to the NX-Graph should still be there...
+ assert (
+ extracted_nx_graph.nodes[node_id]["nx-node-data"]
+ == nx_graph.nodes[node_id]["nx-node-data"]
+ )
+ # Data assigned to the partition's RX-Graph should still be there...
+ assert extracted_nx_graph.nodes[node_id]["internal-node-data"] == node_id
+ # Node_id agrees with __networkx_node__ (created by RX conversion)
+ assert node_id == extracted_nx_graph.nodes[node_id]["__networkx_node__"]
+
+ # Check edge data
+ for edge in extracted_nx_graph.edges:
+ assert (
+ extracted_nx_graph.edges[edge]["nx-edge-data"] == nx_graph.edges[edge]["nx-edge-data"]
+ )
+ # Data assigned to the partition's RX-Graph should still be there...
+ assert extracted_nx_graph.edges[edge]["internal-edge-data"] == "internal-edge-data"
+    # frm: TODO: Testing: compare the rest of the extracted_nx_graph's
+    # node_data and edge_data to the originals to confirm nothing was dropped.
diff --git a/tests/meta/test_diversity.py b/tests/meta/test_diversity.py
index 86d80b3b..463e1d93 100644
--- a/tests/meta/test_diversity.py
+++ b/tests/meta/test_diversity.py
@@ -1,10 +1,9 @@
import networkx
-from gerrychain.partition import GeographicPartition, Partition
-from gerrychain.proposals import propose_random_flip
from gerrychain.graph import Graph
+from gerrychain.meta import DiversityStats, collect_diversity_stats
+from gerrychain.partition import Partition
from gerrychain.updaters import cut_edges
-from gerrychain.meta import collect_diversity_stats, DiversityStats
def test_stats_one_step():
diff --git a/tests/metrics/test_compactness.py b/tests/metrics/test_compactness.py
index fb8d2a94..ea3f9798 100644
--- a/tests/metrics/test_compactness.py
+++ b/tests/metrics/test_compactness.py
@@ -1,4 +1,5 @@
import math
+
from gerrychain.metrics.compactness import compute_polsby_popper
diff --git a/tests/metrics/test_partisan.py b/tests/metrics/test_partisan.py
index 9efcbe4f..4c4554e0 100644
--- a/tests/metrics/test_partisan.py
+++ b/tests/metrics/test_partisan.py
@@ -1,11 +1,13 @@
-import pytest
from unittest.mock import MagicMock
+
+import pytest
+
from gerrychain.metrics import (
efficiency_gap,
- wasted_votes,
mean_median,
partisan_bias,
partisan_gini,
+ wasted_votes,
)
from gerrychain.updaters.election import ElectionResults
@@ -56,7 +58,7 @@ def test_mean_median_has_right_value(mock_election):
def test_signed_partisan_scores_are_positive_if_first_party_has_advantage(
- mock_election
+ mock_election,
):
eg = efficiency_gap(mock_election)
mm = mean_median(mock_election)
diff --git a/tests/optimization/test_gingleator.py b/tests/optimization/test_gingleator.py
index 62862615..1153ffbf 100644
--- a/tests/optimization/test_gingleator.py
+++ b/tests/optimization/test_gingleator.py
@@ -1,12 +1,14 @@
+import random
+from functools import partial
+
+import numpy as np
+import pytest
+
from gerrychain import Partition
-from gerrychain.optimization import Gingleator
from gerrychain.constraints import contiguous
+from gerrychain.optimization import Gingleator
from gerrychain.proposals import recom
from gerrychain.updaters import Tally
-from functools import partial
-import pytest
-import numpy as np
-import random
random.seed(2024)
@@ -44,8 +46,7 @@ def gingleator_test_partition(four_by_five_grid_for_opt):
"population": Tally("population", alias="population"),
"MVAP": Tally("MVAP", alias="MVAP"),
"m_perc": lambda p_dict: {
- key: p_dict["MVAP"][key] / p_dict["population"][key]
- for key in p_dict["MVAP"]
+ key: p_dict["MVAP"][key] / p_dict["population"][key] for key in p_dict["MVAP"]
},
"my_cut_edges": simple_cut_edge_count,
},
@@ -77,7 +78,7 @@ def test_ginglator_needs_min_perc_or_min_pop_col(four_by_five_grid_for_opt):
)
with pytest.raises(ValueError) as gingle_err:
- gingles = Gingleator(
+ _ = Gingleator(
proposal=proposal,
constraints=[contiguous],
initial_state=initial_partition,
@@ -101,8 +102,7 @@ def test_ginglator_warns_if_min_perc_and_min_pop_col_set(four_by_five_grid_for_o
"population": Tally("population", alias="population"),
"MVAP": Tally("MVAP", alias="MVAP"),
"m_perc": lambda p_dict: {
- key: p_dict["MVAP"][key] / p_dict["population"][key]
- for key in p_dict["MVAP"]
+ key: p_dict["MVAP"][key] / p_dict["population"][key] for key in p_dict["MVAP"]
},
"my_cut_edges": simple_cut_edge_count,
},
@@ -119,7 +119,7 @@ def test_ginglator_warns_if_min_perc_and_min_pop_col_set(four_by_five_grid_for_o
)
with pytest.warns() as record:
- gingles = Gingleator(
+ _ = Gingleator(
proposal=proposal,
constraints=[contiguous],
initial_state=initial_partition,
@@ -203,20 +203,14 @@ def test_reward_next_highest_close(four_by_five_grid_for_opt):
assert Gingleator.reward_next_highest_close(initial_partition, "m_perc", 0.5) == 2
# Rounding needed here because of floating point arithmetic
assert (
- round(
- Gingleator.reward_next_highest_close(initial_partition, "m_perc", 0.29), 5
- )
- == 2 + 0.1
+ round(Gingleator.reward_next_highest_close(initial_partition, "m_perc", 0.29), 5) == 2 + 0.1
)
def test_penalize_maximum_over(four_by_five_grid_for_opt):
initial_partition = gingleator_test_partition(four_by_five_grid_for_opt)
- assert (
- Gingleator.penalize_maximum_over(initial_partition, "m_perc", 0.5)
- == 2.0 + 0.48 / 0.50
- )
+ assert Gingleator.penalize_maximum_over(initial_partition, "m_perc", 0.5) == 2.0 + 0.48 / 0.50
assert Gingleator.penalize_maximum_over(initial_partition, "m_perc", 0.6) == 0
@@ -224,9 +218,6 @@ def test_penalize_maximum_over(four_by_five_grid_for_opt):
def test_penalize_avg_over(four_by_five_grid_for_opt):
initial_partition = gingleator_test_partition(four_by_five_grid_for_opt)
- assert (
- Gingleator.penalize_avg_over(initial_partition, "m_perc", 0.5)
- == 2.0 + 0.48 / 0.50
- )
+ assert Gingleator.penalize_avg_over(initial_partition, "m_perc", 0.5) == 2.0 + 0.48 / 0.50
assert Gingleator.penalize_avg_over(initial_partition, "m_perc", 0.6) == 0
diff --git a/tests/optimization/test_single_metric.py b/tests/optimization/test_single_metric.py
index e9868492..ad47ad3e 100644
--- a/tests/optimization/test_single_metric.py
+++ b/tests/optimization/test_single_metric.py
@@ -1,11 +1,13 @@
+import random
+from functools import partial
+
+import numpy as np
+
from gerrychain import Partition
-from gerrychain.optimization import SingleMetricOptimizer
from gerrychain.constraints import contiguous
+from gerrychain.optimization import SingleMetricOptimizer
from gerrychain.proposals import recom
from gerrychain.updaters import Tally
-from functools import partial
-import numpy as np
-import random
random.seed(2024)
diff --git a/tests/partition/test_assignment.py b/tests/partition/test_assignment.py
index c0c55b36..4ecac479 100644
--- a/tests/partition/test_assignment.py
+++ b/tests/partition/test_assignment.py
@@ -1,8 +1,9 @@
+from collections.abc import Mapping
+
import pandas
import pytest
from gerrychain.partition.assignment import Assignment, get_assignment
-from collections.abc import Mapping
@pytest.fixture
diff --git a/tests/partition/test_partition.py b/tests/partition/test_partition.py
index d9e42dbc..c68552ca 100644
--- a/tests/partition/test_partition.py
+++ b/tests/partition/test_partition.py
@@ -5,13 +5,20 @@
import networkx
import pytest
+from gerrychain.graph import Graph
from gerrychain.partition import GeographicPartition, Partition
from gerrychain.proposals import propose_random_flip
-from gerrychain.graph import Graph
from gerrychain.updaters import cut_edges
def test_Partition_can_be_flipped(example_partition):
+ # frm: TODO: Testing: Verify that this flip is in internal RX-based graph node_ids and not "original" NX node_ids
+ #
+ # My guess is that this flip is intended to be in original node_ids but that the test works
+    # anyway because the assertion uses the same numbers. It should probably be changed to use
+ # original node_ids and to translate the node_id and part in the assert into internal node_ids
+ # just to make it crystal clear to anyone following later what is going on...
+
flip = {1: 2}
new_partition = example_partition.flip(flip)
assert new_partition.assignment[1] == 2
@@ -45,6 +52,9 @@ def test_Partition_knows_cut_edges_K3(example_partition):
def test_propose_random_flip_proposes_a_partition(example_partition):
partition = example_partition
+
+    # frm: TODO: Testing: Verify propose_random_flip() to make sure it is doing the right thing
+ # wrt RX-based node_ids vs. original node_ids.
proposal = propose_random_flip(partition)
assert isinstance(proposal, partition.__class__)
@@ -54,10 +64,10 @@ def example_geographic_partition():
graph = Graph.from_networkx(networkx.complete_graph(3))
assignment = {0: 1, 1: 1, 2: 2}
for node in graph.nodes:
- graph.nodes[node]["boundary_node"] = False
- graph.nodes[node]["area"] = 1
+ graph.node_data(node)["boundary_node"] = False
+ graph.node_data(node)["area"] = 1
for edge in graph.edges:
- graph.edges[edge]["shared_perim"] = 1
+ graph.edge_data(edge)["shared_perim"] = 1
return GeographicPartition(graph, assignment, None, None, None)
@@ -69,15 +79,32 @@ def test_geographic_partition_can_be_instantiated(example_geographic_partition):
def test_Partition_parts_is_a_dictionary_of_parts_to_nodes(example_partition):
partition = example_partition
flip = {1: 2}
- new_partition = partition.flip(flip)
+ new_partition = partition.flip(flip, use_original_nx_node_ids=True)
assert all(isinstance(nodes, frozenset) for nodes in new_partition.parts.values())
assert all(isinstance(nodes, frozenset) for nodes in partition.parts.values())
def test_Partition_has_subgraphs(example_partition):
+ # Test that subgraphs work as intended.
+ # The partition has two parts (districts) with IDs: 1, 2
+ # Part #1 has nodes 0, 1, so the subgraph for part #1 should have these nodes
+    # Part #2 has node 2, so the subgraph for part #2 should have this node
+
+ # Note that the original node_ids are based on the original NX-based graph
+ # The node_ids in the partition's graph have been changed by the conversion
+ # from NX to RX, so we need to be careful about when to use "original" node_ids
+ # and when to use "internal" RX-based node_ids
+
partition = example_partition
- assert set(partition.subgraphs[1].nodes) == {0, 1}
- assert set(partition.subgraphs[2].nodes) == {2}
+
+ subgraph_for_part_1 = partition.subgraphs[1]
+ internal_node_id_0 = subgraph_for_part_1.internal_node_id_for_original_nx_node_id(0)
+ internal_node_id_1 = subgraph_for_part_1.internal_node_id_for_original_nx_node_id(1)
+ assert set(partition.subgraphs[1].nodes) == {internal_node_id_0, internal_node_id_1}
+
+ subgraph_for_part_2 = partition.subgraphs[2]
+ internal_node_id = subgraph_for_part_2.internal_node_id_for_original_nx_node_id(2)
+ assert set(partition.subgraphs[2].nodes) == {internal_node_id}
assert len(list(partition.subgraphs)) == 2
@@ -92,10 +119,22 @@ def test_partition_implements_getattr_for_updater_access(example_partition):
def test_can_be_created_from_a_districtr_file(graph, districtr_plan_file):
for node in graph:
- graph.nodes[node]["area_num_1"] = node
+ graph.node_data(node)["area_num_1"] = node
+
+ # frm: TODO: Testing: NX vs. RX node_id issues here...
partition = Partition.from_districtr_file(graph, districtr_plan_file)
- assert partition.assignment.to_dict() == {
+
+ # Convert internal node_ids of the partition's graph to "original" node_ids
+ internal_node_assignment = partition.assignment.to_dict()
+ original_node_assignment = {}
+ for internal_node_id, part in internal_node_assignment.items():
+ original_nx_node_id = partition.graph.original_nx_node_id_for_internal_node_id(
+ internal_node_id
+ )
+ original_node_assignment[original_nx_node_id] = part
+
+ assert original_node_assignment == {
0: 1,
1: 1,
2: 1,
diff --git a/tests/partition/test_plotting.py b/tests/partition/test_plotting.py
index b9916319..7a0e7124 100644
--- a/tests/partition/test_plotting.py
+++ b/tests/partition/test_plotting.py
@@ -1,6 +1,7 @@
from unittest.mock import MagicMock
import geopandas as gp
+import networkx
import pytest
from shapely.geometry import Polygon
@@ -9,7 +10,8 @@
@pytest.fixture
def partition():
- graph = Graph([(0, 1), (1, 3), (2, 3), (0, 2)])
+ nx_graph = networkx.Graph([(0, 1), (1, 3), (2, 3), (0, 2)])
+ graph = Graph.from_networkx(nx_graph)
return Partition(graph, {0: 1, 1: 1, 2: 2, 3: 2})
@@ -66,5 +68,34 @@ def test_uses_graph_geometries_by_default(self, geodataframe):
graph = Graph.from_geodataframe(geodataframe)
partition = Partition(graph=graph, assignment={node: 0 for node in graph})
+
+ # frm: TODO: Testing: how to handle geometry?
+ #
+ # Originally, the following statement blew up because we do not copy
+ # geometry data from NX to RX when we convert to RX.
+ #
+ # I said at the time:
+ # Need to grok what the right way to deal with geometry
+ # data is (is it only an issue for from_geodataframe() or
+ # are there other ways a geometry value might be set?)
+ #
+ # Peter comments (from PR):
+ #
+ # The geometry data should only exist on the attached geodataframe.
+ # In fact, if there is no "geometry" column in the dataframe, this call
+ # should fail.
+ #
+ # Fixing the plotting functions is a low-priority. I need to set up
+ # snapshot tests for these anyway, so if you find working with
+ # matplotlib a PITA (because it is), then don't worry about the
+ # plotting functions for now.
+ #
+    # Worst-case scenario, I can just add some temporary verbiage to
+ # readthedocs telling people to use
+ #
+ # my_partition.df.plot()
+    #
+ # Which will just use all of the plotting stuff that Pandas has set up internally.
+
partition.plot()
assert mock_plot.call_count == 1
diff --git a/tests/test_chain.py b/tests/test_chain.py
index 3910df2f..2a437b83 100644
--- a/tests/test_chain.py
+++ b/tests/test_chain.py
@@ -4,12 +4,12 @@
class MockState:
- def flip(self, changes):
+ def flip(self, changes, use_original_nx_node_ids):
return MockState()
def mock_proposal(state):
- return state.flip({1: 2})
+ return state.flip({1: 2}, use_original_nx_node_ids=True)
def mock_accept(state):
@@ -23,9 +23,7 @@ def mock_is_valid(state):
def test_MarkovChain_runs_only_total_steps_times():
for total_steps in range(1, 11):
initial = MockState()
- chain = MarkovChain(
- mock_proposal, mock_is_valid, mock_accept, initial, total_steps
- )
+ chain = MarkovChain(mock_proposal, mock_is_valid, mock_accept, initial, total_steps)
counter = 0
for state in chain:
assert isinstance(state, MockState)
@@ -38,9 +36,7 @@ def test_MarkovChain_runs_only_total_steps_times():
def test_MarkovChain_returns_the_initial_state_first():
initial = MagicMock()
- chain = MarkovChain(
- mock_proposal, mock_is_valid, mock_accept, initial, total_steps=10
- )
+ chain = MarkovChain(mock_proposal, mock_is_valid, mock_accept, initial, total_steps=10)
counter = 0
for state in chain:
diff --git a/tests/test_frm_graph.py b/tests/test_frm_graph.py
new file mode 100644
index 00000000..436192f1
--- /dev/null
+++ b/tests/test_frm_graph.py
@@ -0,0 +1,688 @@
+import networkx as nx
+import pytest
+import rustworkx as rx
+
+from gerrychain import Graph
+
+###############################################
+# This file contains tests for routines in graph.py
+###############################################
+
+
+@pytest.fixture
+def four_by_five_grid_nx():
+
+ # Create an NX Graph object with attributes
+ #
+ # This graph has the following properties
+ # which are important for the tests below:
+ #
+ # * The "nx_node_id" attribute serves as an
+ # effective "original" node_id so that we
+ # can track a node even when its internal
+ # node_id changes.
+ #
+ # * The graph has two "connected" components:
+ # the first two rows and the last two
+ # rows. This is used in the connected
+ # components tests
+
+ # nx_node_id
+ #
+ # 0 1 2 3 4
+ # 5 6 7 8 9
+ # 10 11 12 13 14
+ # 15 16 17 18 19
+
+ # MVAP:
+ #
+ # 2 2 2 2 2
+ # 2 2 2 2 2
+ # 2 2 2 2 2
+ # 2 2 2 2 2
+
+ nx_graph = nx.Graph()
+ nx_graph.add_nodes_from(
+ [
+ (0, {"population": 10, "nx_node_id": 0, "MVAP": 2}),
+ (1, {"population": 10, "nx_node_id": 1, "MVAP": 2}),
+ (2, {"population": 10, "nx_node_id": 2, "MVAP": 2}),
+ (3, {"population": 10, "nx_node_id": 3, "MVAP": 2}),
+ (4, {"population": 10, "nx_node_id": 4, "MVAP": 2}),
+ (5, {"population": 10, "nx_node_id": 5, "MVAP": 2}),
+ (6, {"population": 10, "nx_node_id": 6, "MVAP": 2}),
+ (7, {"population": 10, "nx_node_id": 7, "MVAP": 2}),
+ (8, {"population": 10, "nx_node_id": 8, "MVAP": 2}),
+ (9, {"population": 10, "nx_node_id": 9, "MVAP": 2}),
+ (10, {"population": 10, "nx_node_id": 10, "MVAP": 2}),
+ (11, {"population": 10, "nx_node_id": 11, "MVAP": 2}),
+ (12, {"population": 10, "nx_node_id": 12, "MVAP": 2}),
+ (13, {"population": 10, "nx_node_id": 13, "MVAP": 2}),
+ (14, {"population": 10, "nx_node_id": 14, "MVAP": 2}),
+ (15, {"population": 10, "nx_node_id": 15, "MVAP": 2}),
+ (16, {"population": 10, "nx_node_id": 16, "MVAP": 2}),
+ (17, {"population": 10, "nx_node_id": 17, "MVAP": 2}),
+ (18, {"population": 10, "nx_node_id": 18, "MVAP": 2}),
+ (19, {"population": 10, "nx_node_id": 19, "MVAP": 2}),
+ ]
+ )
+
+ nx_graph.add_edges_from(
+ [
+ (0, 1),
+ (0, 5),
+ (1, 2),
+ (1, 6),
+ (2, 3),
+ (2, 7),
+ (3, 4),
+ (3, 8),
+ (4, 9),
+ (5, 6),
+ # (5, 10),
+ (6, 7),
+ # (6, 11),
+ (7, 8),
+ # (7, 12),
+ (8, 9),
+ # (8, 13),
+ # (9, 14),
+ (10, 11),
+ (10, 15),
+ (11, 12),
+ (11, 16),
+ (12, 13),
+ (12, 17),
+ (13, 14),
+ (13, 18),
+ (14, 19),
+ (15, 16),
+ (16, 17),
+ (17, 18),
+ (18, 19),
+ ]
+ )
+
+ return nx_graph
+
+
+@pytest.fixture
+def four_by_five_grid_rx(four_by_five_grid_nx):
+ # Create an RX Graph object with attributes
+ rx_graph = rx.networkx_converter(four_by_five_grid_nx, keep_attributes=True)
+ return rx_graph
+
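+
+# A small sanity sketch of the two-component property described in the
+# four_by_five_grid_nx fixture comment above: with the (5, 10)..(9, 14)
+# edges commented out, the top two rows and bottom two rows are separate
+# components. Uses the standard networkx/rustworkx component APIs only;
+# illustrative rather than exhaustive.
+def test_fixture_grid_has_two_connected_components(four_by_five_grid_nx, four_by_five_grid_rx):
+    assert nx.number_connected_components(four_by_five_grid_nx) == 2
+    assert len(rx.connected_components(four_by_five_grid_rx)) == 2
+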
+
+def top_level_graph_is_properly_configured(graph):
+ # This routine tests that top-level graphs (not a subgraph)
+ # are properly configured
+ assert not graph._is_a_subgraph, "Top-level graph _is_a_subgraph is True"
+ assert hasattr(
+ graph, "_node_id_to_parent_node_id_map"
+ ), "Graph._node_id_to_parent_node_id_map is not set"
+ assert hasattr(
+ graph, "_node_id_to_original_nx_node_id_map"
+ ), "Graph._node_id_to_original_nx_node_id_map is not set"
+
+
+def test_from_networkx(four_by_five_grid_nx):
+ graph = Graph.from_networkx(four_by_five_grid_nx)
+ assert len(graph.node_indices) == 20, f"Expected 20 nodes but got {len(graph.node_indices)}"
+ assert len(graph.edge_indices) == 26, f"Expected 26 edges but got {len(graph.edge_indices)}"
+ assert (
+ graph.node_data(1)["population"] == 10
+ ), f"Expected population of 10 but got {graph.node_data(1)['population']}"
+ top_level_graph_is_properly_configured(graph)
+
+
+def test_from_rustworkx(four_by_five_grid_nx):
+ rx_graph = rx.networkx_converter(four_by_five_grid_nx, keep_attributes=True)
+ graph = Graph.from_rustworkx(rx_graph)
+ assert len(graph.node_indices) == 20, f"Expected 20 nodes but got {len(graph.node_indices)}"
+ assert (
+ graph.node_data(1)["population"] == 10
+ ), f"Expected population of 10 but got {graph.node_data(1)['population']}"
+ top_level_graph_is_properly_configured(graph)
+
+
+@pytest.fixture
+def four_by_five_graph_nx(four_by_five_grid_nx):
+    # Create an NX-based Graph object with attributes
+ graph = Graph.from_networkx(four_by_five_grid_nx)
+ return graph
+
+
+@pytest.fixture
+def four_by_five_graph_rx(four_by_five_grid_nx):
+    # Create an RX-based Graph object with attributes
+ #
+ # Instead of using from_rustworkx(), we use
+ # convert_from_nx_to_rx() because tests below
+ # depend on the node_id maps that are created
+ # by convert_from_nx_to_rx()
+ #
+ graph = Graph.from_networkx(four_by_five_grid_nx)
+ converted_graph = graph.convert_from_nx_to_rx()
+ return converted_graph
+
+
+def test_convert_from_nx_to_rx(four_by_five_graph_nx):
+ graph = four_by_five_graph_nx # more readable
+ converted_graph = graph.convert_from_nx_to_rx()
+
+ # Same number of nodes
+ assert len(graph.node_indices) == 20, f"Expected 20 nodes but got {len(graph.node_indices)}"
+ assert (
+ len(converted_graph.node_indices) == 20
+ ), f"Expected 20 nodes but got {len(graph.node_indices)}"
+
+ # Same number of edges
+ assert len(graph.edge_indices) == 26, f"Expected 26 edges but got {len(graph.edge_indices)}"
+ assert (
+ len(converted_graph.edge_indices) == 26
+ ), f"Expected 26 edges but got {len(graph.edge_indices)}"
+
+ # Node data is the same
+ # frm: TODO: Refactoring: Do this the clever Python way and test ALL at the same time
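+    #      (untested sketch of one such form:
+    #       assert all(graph.node_data(n)["population"] == 10 for n in graph.node_indices))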
+ for node_id in graph.node_indices:
+ assert (
+ graph.node_data(node_id)["population"] == 10
+ ), f"Expected population of 10 but got {graph.node_data(node_id)['population']}"
+ assert (
+ graph.node_data(node_id)["nx_node_id"] == node_id
+ ), f"Expected nx_node_id of {node_id} but got {graph.node_data(node_id)['nx_node_id']}"
+ assert (
+ graph.node_data(node_id)["MVAP"] == 2
+ ), f"Expected MVAP of 2 but got {graph.node_data(node_id)['MVAP']}"
+ for node_id in converted_graph.node_indices:
+        assert (
+            converted_graph.node_data(node_id)["population"] == 10
+        ), f"Expected population of 10 but got {converted_graph.node_data(node_id)['population']}"
+ # frm: TODO: Code: Need to use node_id map to get appropriate node_ids for RX graph
+ # assert graph.node_data(node_id)["nx_node_id"] == node_id, \
+ # f"Expected nx_node_id of {node_id} but got {graph.node_data(node_id)['nx_node_id']}"
+        assert (
+            converted_graph.node_data(node_id)["MVAP"] == 2
+        ), f"Expected MVAP of 2 but got {converted_graph.node_data(node_id)['MVAP']}"
+
+ # Confirm that the node_id map to the "original" NX node_ids is correct
+ for node_id in converted_graph.nodes:
+ # get the "original" NX node_id
+ nx_node_id = converted_graph._node_id_to_original_nx_node_id_map[node_id]
+ # confirm that the converted node has "nx_node_id" set to the NX node_id. This
+ # is an artifact of the way the NX graph was constructed.
+ assert converted_graph.node_data(node_id)["nx_node_id"] == nx_node_id
+
+
+def test_get_edge_from_edge_id(four_by_five_graph_nx, four_by_five_graph_rx):
+
+ # Test that get_edge_from_edge_id works for both NX and RX based Graph objects
+
+ # NX edges and edge_ids are the same, so this first test is trivial
+ #
+ nx_edge_id = (0, 1)
+ nx_edge = four_by_five_graph_nx.get_edge_from_edge_id(nx_edge_id)
+ assert nx_edge == (0, 1)
+
+ # RX edge_ids are assigned arbitrarily, so without using the nx_to_rx_node_id_map
+ # we can't know which edge got what edge_id, so this test just verifies that
+    # there is an edge tuple associated with edge_id 0
+ #
+ rx_edge_id = 0 # arbitrary id - but there is always an edge with id == 0
+ rx_edge = four_by_five_graph_rx.get_edge_from_edge_id(rx_edge_id)
+ assert isinstance(rx_edge[0], int), "RX edge does not exist (0)"
+ assert isinstance(rx_edge[1], int), "RX edge does not exist (1)"
+
+
+def test_get_edge_id_from_edge(four_by_five_graph_nx, four_by_five_graph_rx):
+
+ # Test that get_edge_id_from_edge works for both NX and RX based Graph objects
+
+ # NX edges and edge_ids are the same, so this first test is trivial
+ #
+ nx_edge = (0, 1)
+ nx_edge_id = four_by_five_graph_nx.get_edge_id_from_edge(nx_edge)
+ assert nx_edge_id == (0, 1)
+
+ # Test that get_edge_id_from_edge returns an integer value and that
+ # when that value is used to retrieve an edge tuple, we get the
+ # tuple value that is expected
+ #
+ rx_edge = (0, 1)
+ rx_edge_id = four_by_five_graph_rx.get_edge_id_from_edge(rx_edge)
+ assert isinstance(rx_edge_id, int), "Edge ID not found for edge"
+ found_rx_edge = four_by_five_graph_rx.get_edge_from_edge_id(rx_edge_id)
+ assert found_rx_edge == rx_edge, "Edge ID does not yield correct edge value"
+
+
+def test_add_edge():
+ # At present (October 2025), there is nothing to test. The
+ # code just delegates to NetworkX or RustworkX to create
+ # the edge.
+ #
+ # However, it is conceivable that in the future, when users
+ # stop using NX altogether, there might be a reason for a
+ # test, so this is just a placeholder for that future test...
+ #
+ assert True
+
+
+def test_subgraph(four_by_five_graph_rx):
+ """
+ Subgraphs are one of the most dangerous areas of the code.
+ In NX, subgraphs preserve node_ids - that is, the node_id
+ in the subgraph is the same as the node_id in the parent.
+ However, in RX, that is not the case - RX always creates
+ new node_ids starting at 0 and increasing by one
+ sequentially, so in general a node in an RX subgraph
+ will have a different node_id than it has in the parent
+ graph.
+
+ To deal with this, the code creates a map from the
+ node_id in a subgraph to the node_id in the parent
+ graph, _node_id_to_parent_node_id_map. This test verifies
+ that this map is properly created.
+
+ In addition, all RX based graphs that came from an NX
+ based graph record the "original" NX node_ids in
+ another node_id map, _node_id_to_original_nx_node_id_map
+
+ When we create a subgraph, this map needs to be
+ established for the subgraph. This test verifies
+ that this map is properly created.
+
+ Note that this test is only configured to work on
+    RX based Graph objects because the only uses of subgraph
+    in the gerrychain codebase are on RX based Graph objects.
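+
+    As an illustration (values here are made up; the actual id
+    assignment is up to RX): taking the subgraph on parent node_ids
+    [2, 4, 5] would yield subgraph node_ids [0, 1, 2] and
+
+        _node_id_to_parent_node_id_map == {0: 2, 1: 4, 2: 5}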
+ """
+
+ # Create a subgraph for an arbitrary set of nodes:
+ subgraph_node_ids = [2, 4, 5, 8, 11, 13]
+ parent_graph_rx = four_by_five_graph_rx # make the code below clearer
+ subgraph_rx = parent_graph_rx.subgraph(subgraph_node_ids)
+
+    assert len(subgraph_node_ids) == len(subgraph_rx), "Node counts do not agree"
+
+ # verify that _node_id_to_parent_node_id_map is correct
+ for (
+ subgraph_node_id,
+ parent_node_id,
+ ) in subgraph_rx._node_id_to_parent_node_id_map.items():
+ # check that each node in subgraph has the same data (is the same node)
+ # as the node in the parent that it is mapped to
+ #
+        subgraph_stored_node_id = subgraph_rx.node_data(subgraph_node_id)["nx_node_id"]
+ parent_stored_node_id = parent_graph_rx.node_data(parent_node_id)["nx_node_id"]
+ assert (
+ parent_stored_node_id == subgraph_stored_node_id
+ ), "_node_id_to_parent_node_id_map is incorrect"
+
+ # verify that _node_id_to_original_nx_node_id_map is correct
+ for (
+ subgraph_node_id,
+ original_node_id,
+ ) in subgraph_rx._node_id_to_original_nx_node_id_map.items():
+ subgraph_stored_node_id = subgraph_rx.node_data(subgraph_node_id)["nx_node_id"]
+ assert (
+ subgraph_stored_node_id == original_node_id
+ ), "_node_id_to_original_nx_node_id_map is incorrect"
+
+
+def test_num_connected_components(four_by_five_graph_nx, four_by_five_graph_rx):
+ num_components_nx = four_by_five_graph_nx.num_connected_components()
+ num_components_rx = four_by_five_graph_rx.num_connected_components()
+ assert num_components_nx == 2, f"num_components: expected 2 but got {num_components_nx}"
+ assert num_components_rx == 2, f"num_components: expected 2 but got {num_components_rx}"
+
+
+def test_subgraphs_for_connected_components(four_by_five_graph_nx, four_by_five_graph_rx):
+
+ subgraphs_nx = four_by_five_graph_nx.subgraphs_for_connected_components()
+ subgraphs_rx = four_by_five_graph_rx.subgraphs_for_connected_components()
+
+ assert len(subgraphs_nx) == 2
+ assert len(subgraphs_rx) == 2
+
+ assert len(subgraphs_nx[0]) == 10
+ assert len(subgraphs_nx[1]) == 10
+ assert len(subgraphs_rx[0]) == 10
+ assert len(subgraphs_rx[1]) == 10
+
+ # Check that each subgraph (NX-based Graph) has correct nodes in it
+ node_ids_nx_0 = subgraphs_nx[0].node_indices
+ node_ids_nx_1 = subgraphs_nx[1].node_indices
+ assert node_ids_nx_0 == {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
+ assert node_ids_nx_1 == {10, 11, 12, 13, 14, 15, 16, 17, 18, 19}
+
+ # Check that each subgraph (RX-based Graph) has correct nodes in it
+ node_ids_rx_0 = subgraphs_rx[0].node_indices
+ node_ids_rx_1 = subgraphs_rx[1].node_indices
+ original_nx_node_ids_rx_0 = subgraphs_rx[0].original_nx_node_ids_for_set(node_ids_rx_0)
+ original_nx_node_ids_rx_1 = subgraphs_rx[1].original_nx_node_ids_for_set(node_ids_rx_1)
+ assert original_nx_node_ids_rx_0 == {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}
+ assert original_nx_node_ids_rx_1 == {10, 11, 12, 13, 14, 15, 16, 17, 18, 19}
+
+
+def test_to_networkx_graph():
+ # There is already a test for this in another file
+ assert True
+
+
+def test_add_data():
+ # This is already tested in test_make_graph.py
+ assert True
+
+
+########################################################
+# Long utility routine to determine if there is a cycle
+# in a graph (with integer node_ids).
+########################################################
+
+
+def graph_has_cycle(set_of_edges):
+
+ #
+ # Given a set of edges that define a graph, determine
+ # if the graph has cycles.
+ #
+ # This will allow us to test that predecessors and
+ # successors are in fact trees with no cycles.
+ #
+ # The approach is to do a depth-first-search that
+ # remembers each node it has visited, and which
+ # signals that a cycle exists if it revisits a node
+ # it has already visited
+ #
+ # Note that this code assumes that the set of nodes
+ # is a sequential list starting at zero with no gaps
+ # in the sequence. This allows us to use a simplified
+ # adjacency matrix which is adequate for testing
+ # purposes.
+ #
+ # The adjacency matrix is just a 2D square matrix
+ # that has a 1 value for element (i,j) iff there
+ # is an edge from node i to node j. Note that
+ # because we deal with undirected graphs the matrix
+    # is symmetrical - edges go both ways...
+ #
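+    # For example, a 3-node path with edges (0, 1) and (1, 2) has the
+    # adjacency matrix:
+    #
+    #     [[0, 1, 0],
+    #      [1, 0, 1],
+    #      [0, 1, 0]]
+    #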
+
+ def add_edge(adj_matrix, s, t):
+ # Add an edge to an adjacency matrix
+ adj_matrix[s][t] = 1
+ adj_matrix[t][s] = 1 # Since it's an undirected graph
+
+ def delete_edge(adj_matrix, s, t):
+ # Delete an edge from an adjacency matrix
+ adj_matrix[s][t] = 0
+ adj_matrix[t][s] = 0 # Since it's an undirected graph
+
+ def create_empty_adjacency_matrix(num_nodes):
+ # create 2D array, num_nodes x num_nodes
+ adj_matrix = [[0] * num_nodes for _ in range(num_nodes)]
+ return adj_matrix
+
+ def create_adjacency_matrix_from_set_of_edges(set_of_edges):
+
+ # determine num_nodes
+ #
+ set_of_nodes = set()
+ for edge in set_of_edges:
+ for node in edge:
+ set_of_nodes.add(node)
+ num_nodes = len(set_of_nodes)
+ list_of_nodes = list(set_of_nodes)
+
+ # We need node_ids that start at zero and go
+ # up sequentially with no gaps, so create a
+ # map for new node_ids
+ new_node_id_map = {}
+ for index, node_id in enumerate(list_of_nodes):
+ new_node_id_map[node_id] = index
+
+ # Now create a new set of edges with the new node_ids
+ new_set_of_edges = set()
+ for edge in set_of_edges:
+ new_edge = (new_node_id_map[edge[0]], new_node_id_map[edge[1]])
+ new_set_of_edges.add(new_edge)
+
+ # create an empty adjacency matrix
+ #
+ adj_matrix = create_empty_adjacency_matrix(num_nodes)
+
+ # add the edges to the adjacency matrix
+ #
+ for edge in new_set_of_edges:
+ add_edge(adj_matrix, edge[0], edge[1])
+
+ return adj_matrix
+
+ def inner_has_cycle(adj_matrix, visited, s, visit_list):
+ # This routine does a depth first search looking
+ # for cycles - if it encounters a node that it has
+ # already seen then it returns True.
+
+ # Record having visited this node
+ #
+ visited[s] = True
+ visit_list.append(s)
+
+ # Recursively visit all adjacent vertices looking for cycles
+ # If we have already visited a node, then there is a cycle...
+ #
+ for i in range(len(adj_matrix)):
+ # Recurse on every adjacent / child node...
+ if adj_matrix[s][i] == 1:
+ if visited[i]:
+ return True
+ else:
+ # remove this edge from adjacency matrix so we
+ # don't follow link back to node, i.
+ #
+ delete_edge(adj_matrix, s, i)
+ if inner_has_cycle(adj_matrix, visited, i, visit_list):
+ return True
+ return False
+
+ adj_matrix = create_adjacency_matrix_from_set_of_edges(set_of_edges)
+ visited = [False] * len(adj_matrix)
+ visit_list = []
+ root_node = 0 # arbitrary, but every graph has a node 0
+ cycle_found = inner_has_cycle(adj_matrix, visited, root_node, visit_list)
+ return cycle_found
+
+
+def test_graph_has_cycle():
+ # Test to make sure the utility routine, graph_has_cycle, works
+
+ # First try with no cycle
+ # Define the edges of the graph
+ set_of_edges = {
+ (11, 2),
+ (11, 0),
+ # (2, 0), # no cycle without this edge
+ (2, 3),
+ (2, 4),
+ }
+ the_graph_has_a_cycle = graph_has_cycle(set_of_edges)
+ assert not the_graph_has_a_cycle
+
+ # Now try with a cycle
+ # Define the edges of the graph
+ set_of_edges = {
+ (11, 2),
+ (11, 0),
+ (2, 0), # this edge creates a cycle
+ (2, 3),
+ (2, 4),
+ }
+ the_graph_has_a_cycle = graph_has_cycle(set_of_edges)
+ assert the_graph_has_a_cycle
+
+
+def test_generic_bfs_edges(four_by_five_graph_nx, four_by_five_graph_rx):
+ #
+ # The routine, generic_bfs_edges() returns an ordered list of
+ # edges from a breadth-first traversal of a graph, starting
+ # at the given node.
+ #
+ # For our graphs, there are two connected components (the first
+ # two rows and the last two rows) and each component is a
+ # grid:
+ #
+ # 0 - 1 - 2 - 3 - 4
+ # | | | | |
+ # 5 - 6 - 7 - 8 - 9
+ #
+ # 10 - 11 - 12 - 13 - 14
+ # | | | | |
+ # 15 - 16 - 17 - 18 - 19
+ #
+ # So, a BFS starting at 0 should produce something like:
+ #
+ # [ (0,5), (0,1), (1,6), (1,2), (2,7), (2,3), (3,8), (3,4), (4,9) ]
+ #
+ # However, the specific order that is returned depends on the
+ # internals of the algorithm.
+ #
+
+ bfs_edges_nx_0 = set(four_by_five_graph_nx.generic_bfs_edges(0))
+ expected_set_of_edges = {
+ (0, 5),
+ (0, 1),
+ (1, 6),
+ (1, 2),
+ (2, 7),
+ (2, 3),
+ (3, 8),
+ (3, 4),
+ (4, 9),
+ }
+ assert bfs_edges_nx_0 == expected_set_of_edges
+
+ # Check that generic_bfs_edges() does not produce a cycle
+ the_graph_has_a_cycle = graph_has_cycle(bfs_edges_nx_0)
+ assert not the_graph_has_a_cycle
+ bfs_edges_nx_12 = set(four_by_five_graph_nx.generic_bfs_edges(12))
+ the_graph_has_a_cycle = graph_has_cycle(bfs_edges_nx_12)
+ assert not the_graph_has_a_cycle
+
+ """
+ TODO: Testing:
+ * Think about whether this test is actually appropriate. The
+ issue is that the expected_set_of_edges is the right set
+ for this particular graph, but I am not sure that this is
+ a good enough test. Think about other situations...
+
+ * Think about whether to verify that the BFS returned
+ has no cycles. It doesn't in this particular case,
+ but perhaps we should have more cases that stress the test...
+ """
+
+
+def test_generic_bfs_successors_generator():
+ # TODO: Testing: Write a test for this routine
+ #
+    # Note that the code for this routine is very straightforward, so
+    # writing a test is not a high priority. The only reason I did not
+    # just go ahead and write one is that it was not immediately
+    # clear to me how to write the test - it seemed like more work
+    # than doing a thorough code review...
+ #
+ assert True
+
+
+def test_generic_bfs_successors():
+ # TODO: Testing: Write a test for this routine
+ #
+ # Code is trivial, but because this routine is important it
+    # deserves a test - just not clear off the top of my head how
+ # to write the test...
+ #
+ assert True
+
+
+def test_generic_bfs_predecessors():
+ # TODO: Testing: Write a test for this routine
+ #
+ # Code is trivial, but because this routine is important it
+    # deserves a test - just not clear off the top of my head how
+ # to write the test...
+ #
+ assert True
+
+
+def test_predecessors():
+ # TODO: Testing: Write a test for this routine
+ #
+ # Code is trivial, but because this routine is important it
+    # deserves a test - just not clear off the top of my head how
+ # to write the test...
+ #
+ assert True
+
+
+def test_successors():
+ # TODO: Testing: Write a test for this routine
+ #
+ # Code is trivial, but because this routine is important it
+    # deserves a test - just not clear off the top of my head how
+ # to write the test...
+ #
+ assert True
+
+
+def test_laplacian_matrix():
+ # TODO: Testing: Write a test for this routine
+ #
+ # Not clear off the top of my head how
+ # to write the test...
+ #
+ assert True
+
+
+def test_normalized_laplacian_matrix():
+ # TODO: Testing: Write a test for this routine
+ #
+ # This routine has not yet been implemented (as
+ # of October 2025), but when it is implemented
+ # we should add a test for it...
+ #
+ assert True
+
+
+"""
+=============================================================
+
+TODO: Code: ???
+
+ * Aliasing concerns:
+
+ It occurs to me that the RX node_data is aliased with the NX node_data.
+ That is, the data dictionaries in the NX Graph are just retained
+ when the NX Graph is converted to be an RX Graph - so if you change
+ the data in the RX Graph, the NX Graph from which we created the RX
+ graph will also be changed.
+
+ I believe that this is also true for subgraphs for both NX and RX,
+ meaning that the node_data in the subgraph is the exact same
+ data dictionary in the parent graph and the subgraph.
+
+ I am not sure if this is a problem or not, but it is something
+ to be tested / thought about...
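+
+      A quick check would look something like this (hypothetical
+      sketch, using the same convert_from_nx_to_rx() and node_data()
+      calls as the tests above; nx_based_graph is a placeholder name):
+
+          rx_graph = nx_based_graph.convert_from_nx_to_rx()
+          rx_graph.node_data(0)["population"] = 999
+          # If the dictionaries are aliased, the NX-based graph
+          # sees the change too:
+          assert nx_based_graph.node_data(0)["population"] == 999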
+
+ * NX allows node_ids to be almost anything - they can be integers,
+ strings, even tuples. I think that they just need to be hashable.
+
+ I don't know if we need to test that non-integer NX node_ids
+ don't cause a problem. There are tests elsewhere that have
+      NX node_ids that are tuples, and those tests pass, so I think
+ we are OK, but there are no tests specifically targeting this
+ issue that I know of.
+
+=============================================================
+"""
diff --git a/tests/test_laplacian.py b/tests/test_laplacian.py
new file mode 100644
index 00000000..83e252f9
--- /dev/null
+++ b/tests/test_laplacian.py
@@ -0,0 +1,103 @@
+import networkx as nx
+import numpy as np
+import pytest
+import rustworkx as rx
+
+from gerrychain.graph import Graph
+
+"""
+This tests whether we compute the same Laplacian matrix for NX and RX
+based Graph objects.
+
+The NX version is computed (as was true in the old code) by a built-in
+NetworkX routine. The RX version is computed by code added when we
+added support for RX as the embedded graph object.
+
+The NX version produces ints from the code below, while the RX
+version produces floats. I don't think this matters, since the
+Laplacian matrix is used for numerical calculations that should
+happily accept ints or floats, but it means that for this test I need
+to convert the NX version's result to floating-point values.
+"""
+
+# frm: TODO: Testing: Add additional tests for Laplacian matrix calculations, in
+# particular, add a test for normalized_laplacian_matrix()
+# once that routine has been implemented.
+
+
+def are_sparse_matrices_equal(sparse_matrix1, sparse_matrix2, rtol=1e-05, atol=1e-08):
+ """
+ Checks if two scipy.sparse.csr_matrix objects are equal, considering
+ potential floating-point inaccuracies in the data.
+
+ Args:
+ sparse_matrix1 (scipy.sparse.csr_matrix): The first sparse matrix.
+ sparse_matrix2 (scipy.sparse.csr_matrix): The second sparse matrix.
+ rtol (float): The relative tolerance parameter for np.allclose.
+ atol (float): The absolute tolerance parameter for np.allclose.
+
+ Returns:
+ bool: True if the sparse matrices are equal, False otherwise.
+ """
+ # Check if shapes are equal
+ if sparse_matrix1.shape != sparse_matrix2.shape:
+ return False
+
+ # Check if the number of non-zero elements is equal
+ if sparse_matrix1.nnz != sparse_matrix2.nnz:
+ return False
+
+ # Check for equality of structural components (indices and indptr)
+ # These should be exact matches
+ if not (
+ np.array_equal(sparse_matrix1.indices, sparse_matrix2.indices)
+ and np.array_equal(sparse_matrix1.indptr, sparse_matrix2.indptr)
+ ):
+ return False
+
+ # Check for approximate equality of data (values)
+ # Use np.allclose to handle floating-point comparisons
+ if not np.allclose(sparse_matrix1.data, sparse_matrix2.data, rtol=rtol, atol=atol):
+ return False
+
+ return True
+
+
+# Create equivalent NX and RX graphs from scratch
+
+
+@pytest.fixture
+def nx_graph():
+ this_nx_graph = nx.Graph([(0, 1), (0, 2), (1, 2), (2, 3)])
+ return this_nx_graph
+
+
+@pytest.fixture
+def rx_graph():
+ this_rx_graph = rx.PyGraph()
+ # argument to add_node_from() is the data to be associated with each node.
+ # To be compatible with GerryChain, nodes need to have data values that are dictionaries
+ # so we just have an empty dict for each node's data
+ this_rx_graph.add_nodes_from([{}, {}, {}, {}])
+ this_rx_graph.add_edges_from([(0, 1, {}), (0, 2, {}), (1, 2, {}), (2, 3, {})])
+ return this_rx_graph
+
+
+def test_nx_rx_laplacian_matrix_equality(nx_graph, rx_graph):
+
+ # Create Graph objects from the NX and RX graphs
+ gc_nx_graph = Graph.from_networkx(nx_graph)
+ gc_rx_graph = Graph.from_rustworkx(rx_graph)
+
+ # Compute the laplacian_matrix for both the NX and RX based Graph objects
+ gc_nx_laplacian_matrix = gc_nx_graph.laplacian_matrix()
+ gc_rx_laplacian_matrix = gc_rx_graph.laplacian_matrix()
+
+ # Convert values in the NX version to be floating point
+ float_gc_nx_laplacian_matrix = gc_nx_laplacian_matrix.astype(float)
+
+ # test equality
+ matrices_are_equal = are_sparse_matrices_equal(
+ float_gc_nx_laplacian_matrix, gc_rx_laplacian_matrix
+ )
+ assert matrices_are_equal
diff --git a/tests/test_make_graph.py b/tests/test_make_graph.py
index e7c1a5c8..f51a116d 100644
--- a/tests/test_make_graph.py
+++ b/tests/test_make_graph.py
@@ -3,14 +3,18 @@
from unittest.mock import patch
import geopandas as gp
+import networkx
import pandas
import pytest
-from shapely.geometry import Polygon
from pyproj import CRS
+from shapely.geometry import Polygon
from gerrychain.graph import Graph
from gerrychain.graph.geo import GeometryError
+# frm: added following import
+# from gerrychain.graph import node_data
+
@pytest.fixture
def geodataframe():
@@ -65,26 +69,44 @@ def target_file():
def test_add_data_to_graph_can_handle_column_names_that_start_with_numbers():
- graph = Graph([("01", "02"), ("02", "03"), ("03", "01")])
+
+    # frm: Test has been modified to work with the new Graph object that has a NetworkX.Graph
+ # object embedded inside it. I am not sure if this test actually tests
+ # anything useful anymore...
+
+ nx_graph = networkx.Graph([("01", "02"), ("02", "03"), ("03", "01")])
df = pandas.DataFrame({"16SenDVote": [20, 30, 50], "node": ["01", "02", "03"]})
df = df.set_index("node")
+ # frm: Note that the new Graph only supports the add_data() routine if
+ # the underlying graph object is an NX Graph
+
+ graph = Graph.from_networkx(nx_graph)
+
graph.add_data(df, ["16SenDVote"])
- assert graph.nodes["01"]["16SenDVote"] == 20
- assert graph.nodes["02"]["16SenDVote"] == 30
- assert graph.nodes["03"]["16SenDVote"] == 50
+ # Test that the embedded nx_graph object has the added data
+ assert nx_graph.nodes["01"]["16SenDVote"] == 20
+ assert nx_graph.nodes["02"]["16SenDVote"] == 30
+ assert nx_graph.nodes["03"]["16SenDVote"] == 50
+
+ # Test that the graph object has the added data
+ assert graph.node_data("01")["16SenDVote"] == 20
+ assert graph.node_data("02")["16SenDVote"] == 30
+ assert graph.node_data("03")["16SenDVote"] == 50
def test_join_can_handle_right_index():
- graph = Graph([("01", "02"), ("02", "03"), ("03", "01")])
+ nx_graph = networkx.Graph([("01", "02"), ("02", "03"), ("03", "01")])
df = pandas.DataFrame({"16SenDVote": [20, 30, 50], "node": ["01", "02", "03"]})
+ graph = Graph.from_networkx(nx_graph)
+
graph.join(df, ["16SenDVote"], right_index="node")
- assert graph.nodes["01"]["16SenDVote"] == 20
- assert graph.nodes["02"]["16SenDVote"] == 30
- assert graph.nodes["03"]["16SenDVote"] == 50
+ assert graph.node_data("01")["16SenDVote"] == 20
+ assert graph.node_data("02")["16SenDVote"] == 30
+ assert graph.node_data("03")["16SenDVote"] == 50
def test_make_graph_from_dataframe_creates_graph(geodataframe):
@@ -102,9 +124,7 @@ def test_make_graph_from_dataframe_gives_correct_graph(geodataframe):
df = geodataframe.set_index("ID")
graph = Graph.from_geodataframe(df)
- assert edge_set_equal(
- set(graph.edges), {("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")}
- )
+ assert edge_set_equal(set(graph.edges), {("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")})
def test_make_graph_works_with_queen_adjacency(geodataframe):
@@ -132,10 +152,10 @@ def test_can_insist_on_not_reprojecting(geodataframe):
graph = Graph.from_geodataframe(df, reproject=False)
for node in ("a", "b", "c", "d"):
- assert graph.nodes[node]["area"] == 1
+ assert graph.node_data(node)["area"] == 1
for edge in graph.edges:
- assert graph.edges[edge]["shared_perim"] == 1
+ assert graph.edge_data(edge)["shared_perim"] == 1
def test_does_not_reproject_by_default(geodataframe):
@@ -143,10 +163,10 @@ def test_does_not_reproject_by_default(geodataframe):
graph = Graph.from_geodataframe(df)
for node in ("a", "b", "c", "d"):
- assert graph.nodes[node]["area"] == 1.0
+ assert graph.node_data(node)["area"] == 1.0
for edge in graph.edges:
- assert graph.edges[edge]["shared_perim"] == 1.0
+ assert graph.edge_data(edge)["shared_perim"] == 1.0
def test_reproject(geodataframe):
@@ -156,10 +176,10 @@ def test_reproject(geodataframe):
graph = Graph.from_geodataframe(df, reproject=True)
for node in ("a", "b", "c", "d"):
- assert graph.nodes[node]["area"] != 1
+ assert graph.node_data(node)["area"] != 1
for edge in graph.edges:
- assert graph.edges[edge]["shared_perim"] != 1
+ assert graph.edge_data(edge)["shared_perim"] != 1
def test_identifies_boundary_nodes(geodataframe_with_boundary):
@@ -167,8 +187,8 @@ def test_identifies_boundary_nodes(geodataframe_with_boundary):
graph = Graph.from_geodataframe(df)
for node in ("a", "b", "c", "e"):
- assert graph.nodes[node]["boundary_node"]
- assert not graph.nodes["d"]["boundary_node"]
+ assert graph.node_data(node)["boundary_node"]
+ assert not graph.node_data("d")["boundary_node"]
def test_computes_boundary_perims(geodataframe_with_boundary):
@@ -178,25 +198,38 @@ def test_computes_boundary_perims(geodataframe_with_boundary):
expected = {"a": 5, "e": 5, "b": 1, "c": 1}
for node, value in expected.items():
- assert graph.nodes[node]["boundary_perim"] == value
+ assert graph.node_data(node)["boundary_perim"] == value
def edge_set_equal(set1, set2):
- return {(y, x) for x, y in set1} | set1 == {(y, x) for x, y in set2} | set2
+ """
+ Returns true if the two sets have the same edges.
+
+    The complication is that the edge (1,2) is the same as (2,1), so to compare them you
+ need to canonicalize the edges somehow. This code just takes set1 and set2 and creates
+ a new set for each that has both edge pairs for each edge, and it then compares those new sets.
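+
+    For example, {(1, 2)} and {(2, 1)} are considered equal because both
+    expand to {(1, 2), (2, 1)}.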
+ """
+ canonical_set1 = {(y, x) for x, y in set1} | set1
+ canonical_set2 = {(y, x) for x, y in set2} | set2
+ return canonical_set1 == canonical_set2
def test_from_file_adds_all_data_by_default(shapefile):
graph = Graph.from_file(shapefile)
- assert all("data" in node_data for node_data in graph.nodes.values())
- assert all("data2" in node_data for node_data in graph.nodes.values())
+ nx_graph = graph.get_nx_graph()
+
+ assert all("data" in node_data for node_data in nx_graph.nodes.values())
+ assert all("data2" in node_data for node_data in nx_graph.nodes.values())
def test_from_file_and_then_to_json_does_not_error(shapefile, target_file):
graph = Graph.from_file(shapefile)
+ nx_graph = graph.get_nx_graph()
+
# Even the geometry column is copied to the graph
- assert all("geometry" in node_data for node_data in graph.nodes.values())
+ assert all("geometry" in node_data for node_data in nx_graph.nodes.values())
graph.to_json(target_file)
@@ -204,15 +237,19 @@ def test_from_file_and_then_to_json_does_not_error(shapefile, target_file):
def test_from_file_and_then_to_json_with_geometries(shapefile, target_file):
graph = Graph.from_file(shapefile)
+ nx_graph = graph.get_nx_graph()
+
# Even the geometry column is copied to the graph
- assert all("geometry" in node_data for node_data in graph.nodes.values())
+ assert all("geometry" in node_data for node_data in nx_graph.nodes.values())
graph.to_json(target_file, include_geometries_as_geojson=True)
def test_graph_warns_for_islands():
- graph = Graph()
- graph.add_node(0)
+ nx_graph = networkx.Graph()
+ nx_graph.add_node(0)
+
+ graph = Graph.from_networkx(nx_graph)
with pytest.warns(Warning):
graph.warn_for_islands()
@@ -240,11 +277,11 @@ def test_can_ignore_errors_while_making_graph(shapefile):
def test_data_and_geometry(gdf_with_data):
df = gdf_with_data
- graph = Graph.from_geodataframe(df, cols_to_add=["data","data2"])
+ graph = Graph.from_geodataframe(df, cols_to_add=["data", "data2"])
assert graph.geometry is df.geometry
- #graph.add_data(df[["data"]])
+ # graph.add_data(df[["data"]])
assert (graph.data["data"] == df["data"]).all()
- #graph.add_data(df[["data2"]])
+ # graph.add_data(df[["data2"]])
assert list(graph.data.columns) == ["data", "data2"]
@@ -252,7 +289,8 @@ def test_make_graph_from_dataframe_has_crs(gdf_with_data):
graph = Graph.from_geodataframe(gdf_with_data)
assert CRS.from_json(graph.graph["crs"]).equals(gdf_with_data.crs)
+
def test_make_graph_from_shapefile_has_crs(shapefile):
graph = Graph.from_file(shapefile)
df = gp.read_file(shapefile)
- assert CRS.from_json(graph.graph["crs"]).equals(df.crs)
\ No newline at end of file
+ assert CRS.from_json(graph.graph["crs"]).equals(df.crs)
diff --git a/tests/test_metagraph.py b/tests/test_metagraph.py
index 03aa2d59..741fe014 100644
--- a/tests/test_metagraph.py
+++ b/tests/test_metagraph.py
@@ -1,8 +1,11 @@
import pytest
from gerrychain import Partition, updaters
-from gerrychain.metagraph import (all_cut_edge_flips, all_valid_flips,
- all_valid_states_one_flip_away)
+from gerrychain.metagraph import (
+ all_cut_edge_flips,
+ all_valid_flips,
+ all_valid_states_one_flip_away,
+)
@pytest.fixture
@@ -12,12 +15,30 @@ def partition(graph):
def test_all_cut_edge_flips(partition):
+
+ # frm: TODO: Testing: Maybe change all_cut_edge_flips to return a dict
+ #
+ # At present, it returns an iterator, which makes the code below
+ # more complicated than it needs to be. If it just returned
+ # a dict, then the code would be:
+ #
+    #    result = set(all_cut_edge_flips(partition).items())
+ #
result = set(
- (node, part)
- for flip in all_cut_edge_flips(partition)
- for node, part in flip.items()
+ (node, part) for flip in all_cut_edge_flips(partition) for node, part in flip.items()
)
- assert result == {(6, 1), (7, 1), (8, 1), (4, 2), (5, 2), (3, 2)}
+
+ # Convert from internal node_ids to "original" node_ids
+ new_result = set()
+ for internal_node_id, part in result:
+ original_nx_node_id = partition.graph.original_nx_node_id_for_internal_node_id(
+ internal_node_id
+ )
+ new_result.add((original_nx_node_id, part))
+
+ assert new_result == {(6, 1), (7, 1), (8, 1), (4, 2), (5, 2), (3, 2)}
class TestAllValidStatesOneFlipAway:
@@ -35,6 +56,7 @@ def test_accepts_list_of_constraints(self, partition):
def test_all_valid_flips(partition):
+ # frm: TODO: Testing: NX vs. RX node_id issues...
def disallow_six_to_one(partition):
for node, part in partition.flips.items():
if node == 6 and part == 1:
@@ -43,9 +65,23 @@ def disallow_six_to_one(partition):
constraints = [disallow_six_to_one]
+ # frm: TODO: Testing: If I created a utility routine to convert
+ # a list of flips to original node_ids,
+ # then I could use that here and then
+ # convert the resulting list to a set...
+
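+    # A minimal sketch of such a helper (hypothetical - not in the
+    # codebase), using the same graph method that is called below:
+    #
+    #     def flips_to_original_ids(partition, pairs):
+    #         to_orig = partition.graph.original_nx_node_id_for_internal_node_id
+    #         return {(to_orig(node), part) for node, part in pairs}
+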
result = set(
(node, part)
for flip in all_valid_flips(partition, constraints)
for node, part in flip.items()
)
- assert result == {(7, 1), (8, 1), (4, 2), (5, 2), (3, 2)}
+
+ # Convert from internal node_ids to "original" node_ids
+ new_result = set()
+ for internal_node_id, part in result:
+ original_nx_node_id = partition.graph.original_nx_node_id_for_internal_node_id(
+ internal_node_id
+ )
+ new_result.add((original_nx_node_id, part))
+
+ assert new_result == {(7, 1), (8, 1), (4, 2), (5, 2), (3, 2)}
diff --git a/tests/test_region_aware.py b/tests/test_region_aware.py
index bcbe1e0f..0d0ce5b6 100644
--- a/tests/test_region_aware.py
+++ b/tests/test_region_aware.py
@@ -1,46 +1,35 @@
import random
+from concurrent.futures import ProcessPoolExecutor
+from functools import partial
-random.seed(2018)
import pytest
-from functools import partial
-from concurrent.futures import ProcessPoolExecutor
+
from gerrychain import (
+ Graph,
MarkovChain,
Partition,
accept,
constraints,
proposals,
- updaters,
- Graph,
tree,
)
-from gerrychain.tree import ReselectException, BipartitionWarning
-
-
-def run_chain_single(
- seed, category, steps, surcharge, max_attempts=100000, reselect=False
-):
- from gerrychain import (
- MarkovChain,
- Partition,
- accept,
- constraints,
- proposals,
- updaters,
- Graph,
- tree,
- )
- from gerrychain.tree import ReselectException
- from functools import partial
+from gerrychain import updaters as gc_updaters
+from gerrychain.tree import BipartitionWarning
+
+random.seed(2018)
+
+
+def run_chain_single(seed, category, steps, surcharge, max_attempts=100000, reselect=False):
import random
+ from functools import partial
graph = Graph.from_json("tests/graphs_for_test/8x8_with_muni.json")
population_col = "TOTPOP"
updaters = {
- "population": updaters.Tally(population_col, alias="population"),
- "cut_edges": updaters.cut_edges,
- f"{category}_splits": updaters.tally_region_splits([category]),
+ "population": gc_updaters.Tally(population_col, alias="population"),
+ "cut_edges": gc_updaters.cut_edges,
+ f"{category}_splits": gc_updaters.tally_region_splits([category]),
}
initial_partition = Partition(graph, assignment="district", updaters=updaters)
@@ -104,9 +93,7 @@ def test_region_aware_muni_errors():
with pytest.raises(RuntimeError) as exec_info:
# Random seed 0 should fail here
- run_chain_single(
- seed=0, category=region, steps=10000, max_attempts=1, surcharge=2.0
- )
+ run_chain_single(seed=0, category=region, steps=10000, max_attempts=1, surcharge=2.0)
random.seed(2018)
assert "Could not find a possible cut after 1 attempts" in str(exec_info.value)
@@ -161,39 +148,26 @@ def straddled_regions(partition, reg_attr, all_reg_names):
"""Returns the total number of district that straddle two regions in the partition."""
split = {name: 0 for name in all_reg_names}
+ # frm: TODO: Testing: Grok what this tests - not clear to me at this time...
+
for node1, node2 in set(partition.graph.edges() - partition["cut_edges"]):
- split[partition.graph.nodes[node1][reg_attr]] += 1
- split[partition.graph.nodes[node2][reg_attr]] += 1
+ split[partition.graph.node_data(node1)[reg_attr]] += 1
+ split[partition.graph.node_data(node2)[reg_attr]] += 1
return sum(1 for value in split.values() if value > 0)
-def run_chain_dual(
- seed, steps, surcharges={"muni": 0.5, "county": 0.5}, warn_attempts=1000
-):
- from gerrychain import (
- MarkovChain,
- Partition,
- accept,
- constraints,
- proposals,
- updaters,
- Graph,
- tree,
- )
- from functools import partial
+def run_chain_dual(seed, steps, surcharges={"muni": 0.5, "county": 0.5}, warn_attempts=1000):
import random
+ from functools import partial
graph = Graph.from_json("tests/graphs_for_test/8x8_with_muni.json")
population_col = "TOTPOP"
- muni_names = [str(i) for i in range(1, 17)]
- county_names = [str(i) for i in range(1, 5)]
-
updaters = {
- "population": updaters.Tally(population_col, alias="population"),
- "cut_edges": updaters.cut_edges,
- "splits": updaters.tally_region_splits(["muni", "county"]),
+ "population": gc_updaters.Tally(population_col, alias="population"),
+ "cut_edges": gc_updaters.cut_edges,
+ "splits": gc_updaters.tally_region_splits(["muni", "county"]),
}
initial_partition = Partition(graph, assignment="district", updaters=updaters)
diff --git a/tests/test_reproducibility.py b/tests/test_reproducibility.py
index 85d2122c..f79ea2e7 100644
--- a/tests/test_reproducibility.py
+++ b/tests/test_reproducibility.py
@@ -1,7 +1,8 @@
import os
-import pytest
import random
+import pytest
+
random.seed(2018)
@@ -58,37 +59,32 @@ def test_repeatable(three_by_three_grid):
{3: 1},
]
flips = [partition.flips for partition in chain]
- print(flips)
assert flips == expected_flips
@pytest.mark.slow
def test_pa_freeze():
+ import hashlib
+ from functools import partial
+
from gerrychain import (
GeographicPartition,
Graph,
MarkovChain,
- proposals,
- updaters,
- constraints,
accept,
+ constraints,
+ updaters,
)
- import hashlib
from gerrychain.proposals import recom
- from functools import partial
random.seed(2018)
graph = Graph.from_json("docs/_static/PA_VTDs.json")
my_updaters = {"population": updaters.Tally("TOT_POP", alias="population")}
- initial_partition = GeographicPartition(
- graph, assignment="2011_PLA_1", updaters=my_updaters
- )
+ initial_partition = GeographicPartition(graph, assignment="2011_PLA_1", updaters=my_updaters)
- ideal_population = sum(initial_partition["population"].values()) / len(
- initial_partition
- )
+ ideal_population = sum(initial_partition["population"].values()) / len(initial_partition)
# We use functools.partial to bind the extra parameters (pop_col, pop_target, epsilon, node_repeats)
# of the recom proposal.
@@ -100,9 +96,7 @@ def test_pa_freeze():
node_repeats=2,
)
- pop_constraint = constraints.within_percent_of_ideal_population(
- initial_partition, 0.02
- )
+ pop_constraint = constraints.within_percent_of_ideal_population(initial_partition, 0.02)
chain = MarkovChain(
proposal=proposal,
diff --git a/tests/test_tally.py b/tests/test_tally.py
index 220bc149..852e3804 100644
--- a/tests/test_tally.py
+++ b/tests/test_tally.py
@@ -1,19 +1,25 @@
+import random
from collections import defaultdict
-from gerrychain import MarkovChain, Partition, Graph
+import networkx
+
+from gerrychain import Graph, MarkovChain, Partition
from gerrychain.accept import always_accept
from gerrychain.constraints import no_vanishing_districts, single_flip_contiguous
from gerrychain.grid import Grid
from gerrychain.proposals import propose_random_flip
-import random
from gerrychain.updaters.tally import DataTally, Tally
+
random.seed(2018)
+
def random_assignment(graph, num_districts):
return {node: random.choice(range(num_districts)) for node in graph.nodes}
def test_data_tally_works_as_an_updater(three_by_three_grid):
+ # Simple test that a DataTally creates an attribute on a partition.
+ # Another test (below) checks that the computed "tally" is correct.
assignment = random_assignment(three_by_three_grid, 4)
data = {node: random.randint(1, 100) for node in three_by_three_grid.nodes}
parts = tuple(set(assignment.values()))
@@ -27,17 +33,30 @@ def test_data_tally_works_as_an_updater(three_by_three_grid):
def test_data_tally_gives_expected_value(three_by_three_grid):
+    # Put all but one of the nodes in part #1, and put the remaining
+    # node ("first_node") into part #2.
+
first_node = next(iter(three_by_three_grid.nodes))
assignment = {node: 1 for node in three_by_three_grid.nodes}
assignment[first_node] = 2
+ # All nodes get a value of 1 for the data to be tallied
data = {node: 1 for node in three_by_three_grid}
updaters = {"tally": DataTally(data, alias="tally")}
partition = Partition(three_by_three_grid, assignment, updaters)
+ # Note that in general a flip using node_ids generated before creating
+ # a partition should be translated into "internal" RX-Graph based
+ # node_ids. In this case it is not needed, because it doesn't matter
+ # whether we are using the "original" or the "internal" node_id for
+ # first_node because it still refers to the same node and nothing else
+ # is going on.
+
+    # Create a new partition, moving the "first_node" to part #1
flip = {first_node: 1}
new_partition = partition.flip(flip)
+ # The "tally" should increase by one because of the flipped node's data
assert new_partition["tally"][1] == partition["tally"][1] + 1
@@ -49,7 +68,7 @@ def test_data_tally_mimics_old_tally_usage(graph_with_random_data_factory):
assignment = {i: 1 if i in range(4) else 2 for i in range(9)}
partition = Partition(graph, assignment, updaters)
- expected_total_in_district_one = sum(graph.nodes[i]["total"] for i in range(4))
+ expected_total_in_district_one = sum(graph.node_data(i)["total"] for i in range(4))
assert partition["total"][1] == expected_total_in_district_one
@@ -68,7 +87,7 @@ def get_expected_tally(partition):
expected = defaultdict(int)
for node in partition.graph.nodes:
part = partition.assignment[node]
- expected[part] += partition.graph.nodes[node]["population"]
+ expected[part] += partition.graph.node_data(node)["population"]
return expected
for state in chain:
@@ -77,9 +96,10 @@ def get_expected_tally(partition):
def test_works_when_no_flips_occur():
- graph = Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
+ nx_graph = networkx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)])
+ graph = Graph.from_networkx(nx_graph)
for node in graph:
- graph.nodes[node]["pop"] = node + 1
+ graph.node_data(node)["pop"] = node + 1
partition = Partition(graph, {0: 0, 1: 0, 2: 1, 3: 1}, {"pop": Tally("pop")})
chain = MarkovChain(lambda p: p.flip({}), [], always_accept, partition, 10)
diff --git a/tests/test_tree.py b/tests/test_tree.py
index 1805b8ca..b3c489a7 100644
--- a/tests/test_tree.py
+++ b/tests/test_tree.py
@@ -1,7 +1,10 @@
import functools
+import random
+from functools import partial
import networkx
import pytest
+import rustworkx
from gerrychain import MarkovChain
from gerrychain.constraints import contiguous, within_percent_of_ideal_population
@@ -9,106 +12,214 @@
from gerrychain.partition import Partition
from gerrychain.proposals import recom, reversible_recom
from gerrychain.tree import (
+ PopulatedGraph,
bipartition_tree,
- random_spanning_tree,
+ bipartition_tree_random,
find_balanced_edge_cuts_contraction,
find_balanced_edge_cuts_memoization,
- recursive_tree_part,
+ get_max_prime_factor_less_than,
+ random_spanning_tree,
recursive_seed_part,
- PopulatedGraph,
+ recursive_tree_part,
uniform_spanning_tree,
- get_max_prime_factor_less_than,
- bipartition_tree_random,
)
from gerrychain.updaters import Tally, cut_edges
-from functools import partial
-import random
random.seed(2018)
+#
+# This code is complicated by the need to test both NX-based
+# and RX-based Graph objects.
+#
+# The pattern is to define the test logic in a routine that
+# will be run with both NX-based and RX-based Graph objects
+# and to then have the actual test case call that logic.
+# This buries the asserts down a level, which means that
+# figuring out what went wrong if a test fails will be
+# slightly more challenging, but it keeps the logic for
+# testing both NX-based and RX-based Graph objects clean.
+#
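+# In outline, the pattern looks like:
+#
+#     def do_test_xyz(graph):            # shared logic and asserts
+#         ...
+#
+#     def test_xyz(graph_nx, graph_rx):  # the actual pytest case
+#         do_test_xyz(graph_nx)
+#         do_test_xyz(graph_rx)
+#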
+
+# frm: TODO: Documentation: test_tree.py: explain nx_to_rx_node_id_map
+
@pytest.fixture
-def graph_with_pop(three_by_three_grid):
+def graph_with_pop_nx(three_by_three_grid):
+ # NX-based Graph object
for node in three_by_three_grid:
- three_by_three_grid.nodes[node]["pop"] = 1
- return Graph.from_networkx(three_by_three_grid)
+ three_by_three_grid.node_data(node)["pop"] = 1
+ return three_by_three_grid
@pytest.fixture
-def partition_with_pop(graph_with_pop):
+def graph_with_pop_rx(graph_with_pop_nx):
+ # RX-based Graph object (same data as NX-based version)
+ graph_rx = graph_with_pop_nx.convert_from_nx_to_rx()
+ return graph_rx
+
+
+@pytest.fixture
+def partition_with_pop(graph_with_pop_nx):
+ # No need for an RX-based Graph here because creating the
+ # Partition object converts the graph to be RX-based if
+ # it is not already RX-based
+ #
return Partition(
- graph_with_pop,
+ graph_with_pop_nx,
{0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 1, 6: 1, 7: 1, 8: 1},
updaters={"pop": Tally("pop"), "cut_edges": cut_edges},
)
@pytest.fixture
-def twelve_by_twelve_with_pop():
+def twelve_by_twelve_with_pop_nx():
+ # NX-based Graph object
+
xy_grid = networkx.grid_graph([12, 12])
+
+    # Relabel nodes with integers rather than tuples. The node
+    # at Cartesian coordinate (x, y) is relabeled with the
+    # integer x*12 + y, which just numbers nodes
+    # sequentially from 0 by row...
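+    # For example, node (1, 2) is relabeled 1*12 + 2 = 14.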
+ #
nodes = {node: node[1] + 12 * node[0] for node in xy_grid}
grid = networkx.relabel_nodes(xy_grid, nodes)
+
for node in grid:
grid.nodes[node]["pop"] = 1
return Graph.from_networkx(grid)
-def test_bipartition_tree_returns_a_subset_of_nodes(graph_with_pop):
- ideal_pop = sum(graph_with_pop.nodes[node]["pop"] for node in graph_with_pop) / 2
- result = bipartition_tree(graph_with_pop, "pop", ideal_pop, 0.25, 10)
+@pytest.fixture
+def twelve_by_twelve_with_pop_rx(twelve_by_twelve_with_pop_nx):
+ # RX-based Graph object (same data as NX-based version)
+ graph_rx = twelve_by_twelve_with_pop_nx.convert_from_nx_to_rx()
+ return graph_rx
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_bipartition_tree_random_returns_a_subset_of_nodes(graph):
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
+ result = bipartition_tree_random(graph, "pop", ideal_pop, 0.25, 10)
assert isinstance(result, frozenset)
- assert all(node in graph_with_pop.nodes for node in result)
+ assert all(node in graph.nodes for node in result)
+
+
+def test_bipartition_tree_random_returns_a_subset_of_nodes(graph_with_pop_nx, graph_with_pop_rx):
+ # Test both NX-based and RX-based Graph objects
+ do_test_bipartition_tree_random_returns_a_subset_of_nodes(graph_with_pop_nx)
+ do_test_bipartition_tree_random_returns_a_subset_of_nodes(graph_with_pop_rx)
+
+# ---------------------------------------------------------------------
-def test_bipartition_tree_returns_within_epsilon_of_target_pop(graph_with_pop):
- ideal_pop = sum(graph_with_pop.nodes[node]["pop"] for node in graph_with_pop) / 2
+
+def do_test_bipartition_tree_random_returns_within_epsilon_of_target_pop(graph):
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
epsilon = 0.25
- result = bipartition_tree(graph_with_pop, "pop", ideal_pop, epsilon, 10)
+ result = bipartition_tree_random(graph, "pop", ideal_pop, epsilon, 10)
- part_pop = sum(graph_with_pop.nodes[node]["pop"] for node in result)
+ part_pop = sum(graph.node_data(node)["pop"] for node in result)
assert abs(part_pop - ideal_pop) / ideal_pop < epsilon
-def test_recursive_tree_part_returns_within_epsilon_of_target_pop(
- twelve_by_twelve_with_pop,
+def test_bipartition_tree_random_returns_within_epsilon_of_target_pop(
+ graph_with_pop_nx, graph_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_bipartition_tree_random_returns_within_epsilon_of_target_pop(graph_with_pop_nx)
+ do_test_bipartition_tree_random_returns_within_epsilon_of_target_pop(graph_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_bipartition_tree_returns_a_subset_of_nodes(graph):
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
+ result = bipartition_tree(graph, "pop", ideal_pop, 0.25, 10)
+ assert isinstance(result, frozenset)
+ assert all(node in graph.nodes for node in result)
+
+
+def test_bipartition_tree_returns_a_subset_of_nodes(graph_with_pop_nx, graph_with_pop_rx):
+ # Test both NX-based and RX-based Graph objects
+ do_test_bipartition_tree_returns_a_subset_of_nodes(graph_with_pop_nx)
+ do_test_bipartition_tree_returns_a_subset_of_nodes(graph_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_bipartition_tree_returns_within_epsilon_of_target_pop(graph):
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
+ epsilon = 0.25
+ result = bipartition_tree(graph, "pop", ideal_pop, epsilon, 10)
+
+ part_pop = sum(graph.node_data(node)["pop"] for node in result)
+ assert abs(part_pop - ideal_pop) / ideal_pop < epsilon
+
+
+def test_bipartition_tree_returns_within_epsilon_of_target_pop(
+ graph_with_pop_nx, graph_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_bipartition_tree_returns_within_epsilon_of_target_pop(graph_with_pop_nx)
+ do_test_bipartition_tree_returns_within_epsilon_of_target_pop(graph_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_recursive_tree_part_returns_within_epsilon_of_target_pop(
+ twelve_by_twelve_with_pop_graph,
):
n_districts = 7 # 144/7 ≈ 20.5 nodes/subgraph (1 person/node)
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.05
result = recursive_tree_part(
- twelve_by_twelve_with_pop,
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
epsilon,
)
- partition = Partition(
- twelve_by_twelve_with_pop, result, updaters={"pop": Tally("pop")}
- )
- return all(
- abs(part_pop - ideal_pop) / ideal_pop < epsilon
- for part_pop in partition["pop"].values()
+ partition = Partition(twelve_by_twelve_with_pop_graph, result, updaters={"pop": Tally("pop")})
+ assert all(
+ abs(part_pop - ideal_pop) / ideal_pop < epsilon for part_pop in partition["pop"].values()
)
-def test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contraction(
- twelve_by_twelve_with_pop,
+def test_recursive_tree_part_returns_within_epsilon_of_target_pop(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_tree_part_returns_within_epsilon_of_target_pop(twelve_by_twelve_with_pop_nx)
+ do_test_recursive_tree_part_returns_within_epsilon_of_target_pop(twelve_by_twelve_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_graph,
):
n_districts = 7 # 144/7 ≈ 20.5 nodes/subgraph (1 person/node)
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.05
result = recursive_tree_part(
- twelve_by_twelve_with_pop,
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
@@ -119,28 +230,40 @@ def test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contract
balance_edge_fn=find_balanced_edge_cuts_contraction,
),
)
- partition = Partition(
- twelve_by_twelve_with_pop, result, updaters={"pop": Tally("pop")}
+ partition = Partition(twelve_by_twelve_with_pop_graph, result, updaters={"pop": Tally("pop")})
+ assert all(
+ abs(part_pop - ideal_pop) / ideal_pop < epsilon for part_pop in partition["pop"].values()
+ )
+
+
+def test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_nx
)
- return all(
- abs(part_pop - ideal_pop) / ideal_pop < epsilon
- for part_pop in partition["pop"].values()
+ do_test_recursive_tree_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_rx
)
-def test_recursive_seed_part_returns_within_epsilon_of_target_pop(
- twelve_by_twelve_with_pop,
+# ---------------------------------------------------------------------
+
+
+def do_test_recursive_seed_part_returns_within_epsilon_of_target_pop(
+ twelve_by_twelve_with_pop_graph,
):
n_districts = 7 # 144/7 ≈ 20.5 nodes/subgraph (1 person/node)
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.1
result = recursive_seed_part(
- twelve_by_twelve_with_pop,
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
@@ -148,28 +271,36 @@ def test_recursive_seed_part_returns_within_epsilon_of_target_pop(
n=5,
ceil=None,
)
- partition = Partition(
- twelve_by_twelve_with_pop, result, updaters={"pop": Tally("pop")}
- )
- return all(
- abs(part_pop - ideal_pop) / ideal_pop < epsilon
- for part_pop in partition["pop"].values()
+ partition = Partition(twelve_by_twelve_with_pop_graph, result, updaters={"pop": Tally("pop")})
+ assert all(
+ abs(part_pop - ideal_pop) / ideal_pop < epsilon for part_pop in partition["pop"].values()
)
-def test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contraction(
- twelve_by_twelve_with_pop,
+def test_recursive_seed_part_returns_within_epsilon_of_target_pop(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_seed_part_returns_within_epsilon_of_target_pop(twelve_by_twelve_with_pop_nx)
+ do_test_recursive_seed_part_returns_within_epsilon_of_target_pop(twelve_by_twelve_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_graph,
):
n_districts = 7 # 144/7 ≈ 20.5 nodes/subgraph (1 person/node)
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.1
result = recursive_seed_part(
- twelve_by_twelve_with_pop,
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
@@ -182,16 +313,28 @@ def test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contract
balance_edge_fn=find_balanced_edge_cuts_contraction,
),
)
- partition = Partition(
- twelve_by_twelve_with_pop, result, updaters={"pop": Tally("pop")}
+ partition = Partition(twelve_by_twelve_with_pop_graph, result, updaters={"pop": Tally("pop")})
+ assert all(
+ abs(part_pop - ideal_pop) / ideal_pop < epsilon for part_pop in partition["pop"].values()
)
- return all(
- abs(part_pop - ideal_pop) / ideal_pop < epsilon
- for part_pop in partition["pop"].values()
+
+
+def test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_nx
)
+ do_test_recursive_seed_part_returns_within_epsilon_of_target_pop_using_contraction(
+ twelve_by_twelve_with_pop_rx
+ )
+
+# ---------------------------------------------------------------------
-def test_recursive_seed_part_uses_method(twelve_by_twelve_with_pop):
+
+def do_test_recursive_seed_part_uses_method(twelve_by_twelve_with_pop_graph):
calls = 0
def dummy_method(graph, pop_col, pop_target, epsilon, node_repeats, one_sided_cut):
@@ -210,13 +353,13 @@ def dummy_method(graph, pop_col, pop_target, epsilon, node_repeats, one_sided_cu
n_districts = 7 # 144/7 ≈ 20.5 nodes/subgraph (1 person/node)
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.1
- result = recursive_seed_part(
- twelve_by_twelve_with_pop,
+ _ = recursive_seed_part(
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
@@ -232,65 +375,175 @@ def dummy_method(graph, pop_col, pop_target, epsilon, node_repeats, one_sided_cu
assert calls >= n_districts - 1
-def test_recursive_seed_part_with_n_unspecified_within_epsilon(
- twelve_by_twelve_with_pop,
+def test_recursive_seed_part_uses_method(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_seed_part_uses_method(twelve_by_twelve_with_pop_nx)
+ do_test_recursive_seed_part_uses_method(twelve_by_twelve_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_recursive_seed_part_with_n_unspecified_within_epsilon(
+ twelve_by_twelve_with_pop_graph,
):
n_districts = 6 # This should set n=3
ideal_pop = (
sum(
- twelve_by_twelve_with_pop.nodes[node]["pop"]
- for node in twelve_by_twelve_with_pop
+ twelve_by_twelve_with_pop_graph.node_data(node)["pop"]
+ for node in twelve_by_twelve_with_pop_graph
)
) / n_districts
epsilon = 0.05
result = recursive_seed_part(
- twelve_by_twelve_with_pop,
+ twelve_by_twelve_with_pop_graph,
range(n_districts),
ideal_pop,
"pop",
epsilon,
ceil=None,
)
- partition = Partition(
- twelve_by_twelve_with_pop, result, updaters={"pop": Tally("pop")}
- )
- return all(
- abs(part_pop - ideal_pop) / ideal_pop < epsilon
- for part_pop in partition["pop"].values()
+ partition = Partition(twelve_by_twelve_with_pop_graph, result, updaters={"pop": Tally("pop")})
+ assert all(
+ abs(part_pop - ideal_pop) / ideal_pop < epsilon for part_pop in partition["pop"].values()
)
-def test_random_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop):
- tree = random_spanning_tree(graph_with_pop)
- assert networkx.is_tree(tree)
+def test_recursive_seed_part_with_n_unspecified_within_epsilon(
+ twelve_by_twelve_with_pop_nx, twelve_by_twelve_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_recursive_seed_part_with_n_unspecified_within_epsilon(twelve_by_twelve_with_pop_nx)
+ do_test_recursive_seed_part_with_n_unspecified_within_epsilon(twelve_by_twelve_with_pop_rx)
-def test_uniform_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop):
- tree = uniform_spanning_tree(graph_with_pop)
- assert networkx.is_tree(tree)
+# ---------------------------------------------------------------------
-def test_bipartition_tree_returns_a_tree(graph_with_pop):
- ideal_pop = sum(graph_with_pop.nodes[node]["pop"] for node in graph_with_pop) / 2
- tree = Graph.from_networkx(
- networkx.Graph([(0, 1), (1, 2), (1, 4), (3, 4), (4, 5), (3, 6), (6, 7), (6, 8)])
- )
- for node in tree:
- tree.nodes[node]["pop"] = graph_with_pop.nodes[node]["pop"]
+def do_test_random_spanning_tree_returns_tree_with_pop_attribute(graph):
+ tree = random_spanning_tree(graph)
+ assert tree.is_a_tree()
- result = bipartition_tree(
- graph_with_pop, "pop", ideal_pop, 0.25, 10, tree, lambda x: 4
- )
- assert networkx.is_tree(tree.subgraph(result))
- assert networkx.is_tree(
- tree.subgraph({node for node in tree if node not in result})
+def test_random_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop_nx, graph_with_pop_rx):
+ # Test both NX-based and RX-based Graph objects
+ do_test_random_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop_nx)
+ do_test_random_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_uniform_spanning_tree_returns_tree_with_pop_attribute(graph):
+ tree = uniform_spanning_tree(graph)
+ assert tree.is_a_tree()
+
+
+def test_uniform_spanning_tree_returns_tree_with_pop_attribute(
+ graph_with_pop_nx, graph_with_pop_rx
+):
+ # Test both NX-based and RX-based Graph objects
+ do_test_uniform_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop_nx)
+ do_test_uniform_spanning_tree_returns_tree_with_pop_attribute(graph_with_pop_rx)
+
+
+# ---------------------------------------------------------------------
+
+
+def do_test_bipartition_tree_returns_a_tree(graph, spanning_tree):
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
+
+ result = bipartition_tree(graph, "pop", ideal_pop, 0.25, 10, spanning_tree, lambda x: 4)
+
+ assert spanning_tree.subgraph(result).is_a_tree()
+ assert spanning_tree.subgraph(
+ {node for node in spanning_tree if node not in result}
+ ).is_a_tree()
+
+
+def create_graphs_from_nx_edges(num_nodes, list_of_edges_nx, nx_to_rx_node_id_map):
+
+ # NX is easy - just use the list of NX edges
+ graph_nx = Graph.from_networkx(networkx.Graph(list_of_edges_nx))
+
+ # RX requires more work.
+ #
+ # First we create the RX graph and add nodes.
+ #
+ # frm: TODO: Testing: Update test so that the number of nodes is not hard-coded...
+ #
+ # Then we need to create the appropriate RX edges - the ones that
+ # correspond to the NX edges but use the RX node_ids.
+ #
+ # To do that we translate the node_ids used in the
+ # list of edges into the ones used in the RX graph via the
+ # nx_to_rx_node_id_map, add the edges to the rustworkx.PyGraph, and
+ # finally wrap that PyGraph in a "new" Graph object.
+
+ # Create the RX graph
+ rx_graph = rustworkx.PyGraph()
+ for i in range(num_nodes):
+ rx_graph.add_node({}) # empty data dict for node_data
+ # Verify that the nodes created have node_ids 0-(num_nodes-1)
+ assert set(rx_graph.node_indices()) == set(range(num_nodes))
+ # Set the attribute identifying the "original" NX node_id
+ # This is normally set by the code that converts an NX graph to RX
+ # but we are cobbling together stuff for a test and so have to
+ # just do it here...
+ rx_to_nx_node_id_map = {v: k for k, v in nx_to_rx_node_id_map.items()}
+ for node_id in rx_graph.node_indices():
+ rx_graph[node_id]["__networkx_node__"] = rx_to_nx_node_id_map[node_id]
+
+ # translate the NX edges into the appropriate node_ids for the derived RX graph
+ list_of_edges_rx = [
+ (
+ nx_to_rx_node_id_map[edge[0]],
+ nx_to_rx_node_id_map[edge[1]],
+ {}, # empty data dict for edge_data
+ )
+ for edge in list_of_edges_nx
+ ]
+
+ # Add the RX edges
+ rx_graph.add_edges_from(list_of_edges_rx)
+ graph_rx = Graph.from_rustworkx(rx_graph)
+
+ return graph_nx, graph_rx
+
+
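+# A minimal usage sketch for create_graphs_from_nx_edges (hypothetical values;
+# the identity map stands in for a real nx_to_rx_node_id_map):
+#
+#     edges_nx = [(0, 1), (1, 2)]
+#     id_map = {n: n for n in range(3)}  # identity: nx_node_id == rx_node_id
+#     g_nx, g_rx = create_graphs_from_nx_edges(3, edges_nx, id_map)
+
+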
+def test_bipartition_tree_returns_a_tree(graph_with_pop_nx, graph_with_pop_rx):
+ # Test both NX-based and RX-based Graph objects
+
+ spanning_tree_edges_nx = [
+ (0, 1),
+ (1, 2),
+ (1, 4),
+ (3, 4),
+ (4, 5),
+ (3, 6),
+ (6, 7),
+ (6, 8),
+ ]
+
+ spanning_tree_nx, spanning_tree_rx = create_graphs_from_nx_edges(
+ 9, spanning_tree_edges_nx, graph_with_pop_rx.nx_to_rx_node_id_map
)
+ # Give the nodes a population
+ for node in spanning_tree_nx:
+ spanning_tree_nx.node_data(node)["pop"] = 1
+ for node in spanning_tree_rx:
+ spanning_tree_rx.node_data(node)["pop"] = 1
+
+ do_test_bipartition_tree_returns_a_tree(graph_with_pop_nx, spanning_tree_nx)
+ do_test_bipartition_tree_returns_a_tree(graph_with_pop_rx, spanning_tree_rx)
+
def test_recom_works_as_a_proposal(partition_with_pop):
graph = partition_with_pop.graph
- ideal_pop = sum(graph.nodes[node]["pop"] for node in graph) / 2
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
proposal = functools.partial(
recom, pop_col="pop", pop_target=ideal_pop, epsilon=0.25, node_repeats=5
)
@@ -305,19 +558,70 @@ def test_recom_works_as_a_proposal(partition_with_pop):
def test_reversible_recom_works_as_a_proposal(partition_with_pop):
random.seed(2018)
graph = partition_with_pop.graph
- ideal_pop = sum(graph.nodes[node]["pop"] for node in graph) / 2
+ ideal_pop = sum(graph.node_data(node)["pop"] for node in graph) / 2
proposal = functools.partial(
reversible_recom, pop_col="pop", pop_target=ideal_pop, epsilon=0.10, M=1
)
constraints = [within_percent_of_ideal_population(partition_with_pop, 0.25, "pop")]
+ # frm: ???: I am not sure how epsilon of 0.10 interacts with the constraint.
+ #
+ # The issue is that there are 9 nodes each with a population of 1, so the ideal population
+ # is 4.5. But no matter how you split the graph, you end up with an integer population, say,
+ # 4 or 5 - so you will never get within 0.10 of 4.5.
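+ # Concretely: any split gives integer part populations, and the best
+ # possible split is 4 vs 5, but |4 - 4.5| / 4.5 ≈ 0.111 > 0.10.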
+ #
+ # I am not quite sure what is being tested here...
+ #
+ # within_percent_of_ideal_population() returns a Bounds object which contains the lower and
+ # upper bounds for a given value - in this case 25 percent (0.25) of the ideal population.
+ #
+ # The more I dig into this the more I shake my head. The value of "epsilon" passed into
+ # reversible_recom() seems to only ever be used when creating a PopulatedGraph, which in turn
+ # only ever uses it when doing a specific balanced edge cut algorithm. That is, the value of
+ # epsilon is very rarely used, and yet it is passed in as one of the important parameters to
+ # reversible_recom(). It looks like the original coders thought that it would be a great thing
+ # to have in the PopulatedGraph object, but then they didn't actually use it. *sigh*
+ #
+ # Then this test defines a population constraint that is OK if the population
+ # is within 25% of ideal - which is at odds with the epsilon above of 10%, but since
+ # the value of epsilon (of 10%) is never used, whatever...
+ #
+
+ # frm: TODO: Testing: Grok this test - what is it trying to accomplish?
+ #
+ # The proposal uses reversible_recom() with the default value for the "repeat_until_valid"
+ # parameter which is False. This means that the call to try to combine and then split two
+ # parts (districts) only gets one shot at it before it fails. In this case, that means that
+ # it fails EVERY time - because the initial spanning tree that is returned is not balanced
+ # enough to satisfy the population constraint. If you let it run, then it succeeds after
+ # several attempts (around 10, I think), but it never succeeds on the first try, and there is no
+ # randomness possible since we only have two parts (districts) that we can merge.
+ #
+ # So this test runs through 100 chain iterations doing NOTHING - returning the same partition
+ # each iteration, and in fact returning the same partition at the end that it started with.
+ #
+ # This raises all sorts of issues:
+ #
+ # * Makes no sense for this test
+ # * Questions the logic in reversible_recom() to not detect an infinite loop
+ # * Questions the logic that does not inform the user somehow that the chain is ineffective
+ # * Raises the issue of documentation of the code - it took me quite a long time to
+ # figure out WTF was going on...
+ #
+
chain = MarkovChain(proposal, constraints, lambda x: True, partition_with_pop, 100)
for state in chain:
assert contiguous(state)
+# frm: TODO: Testing: Add more tests using MarkovChain...
+
+
def test_find_balanced_cuts_contraction():
+
+ # frm: TODO: Testing: Add test for RX-based Graph object
+
tree = Graph.from_networkx(
networkx.Graph([(0, 1), (1, 2), (1, 4), (3, 4), (4, 5), (3, 6), (6, 7), (6, 8)])
)
@@ -330,81 +634,110 @@ def test_find_balanced_cuts_contraction():
# |
# 8
- populated_tree = PopulatedGraph(
- tree, {node: 1 for node in tree}, len(tree) / 2, 0.5
- )
+ populated_tree = PopulatedGraph(tree, {node: 1 for node in tree}, len(tree) / 2, 0.5)
cuts = find_balanced_edge_cuts_contraction(populated_tree)
edges = set(tuple(sorted(cut.edge)) for cut in cuts)
assert edges == {(1, 4), (3, 4), (3, 6)}
def test_no_balanced_cuts_contraction_when_one_side_okay():
- tree = Graph.from_networkx(networkx.Graph([(0, 1), (1, 2), (2, 3), (3, 4)]))
+ list_of_edges_nx = [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ # For this test we are not dealing with an RX-based Graph object
+ # that is derived from an NX-based Graph object, so the
+ # nx_to_rx_node_id_map can just be the identity map...
+ #
+ nx_to_rx_node_id_map = {node: node for node in range(5)}
+
+ tree_nx, tree_rx = create_graphs_from_nx_edges(5, list_of_edges_nx, nx_to_rx_node_id_map)
+
+ # OK to use the same populations for NX and RX graphs
populations = {0: 4, 1: 4, 2: 3, 3: 3, 4: 3}
- populated_tree = PopulatedGraph(
- graph=tree, populations=populations, ideal_pop=10, epsilon=0.1
+ populated_tree_nx = PopulatedGraph(
+ graph=tree_nx, populations=populations, ideal_pop=10, epsilon=0.1
+ )
+ populated_tree_rx = PopulatedGraph(
+ graph=tree_rx, populations=populations, ideal_pop=10, epsilon=0.1
)
- cuts = find_balanced_edge_cuts_contraction(populated_tree, one_sided_cut=False)
- assert cuts == []
+ cuts_nx = find_balanced_edge_cuts_contraction(populated_tree_nx, one_sided_cut=False)
+ assert cuts_nx == []
+
+ cuts_rx = find_balanced_edge_cuts_contraction(populated_tree_rx, one_sided_cut=False)
+ assert cuts_rx == []
def test_find_balanced_cuts_memo():
- tree = Graph.from_networkx(
- networkx.Graph([(0, 1), (1, 2), (1, 4), (3, 4), (4, 5), (3, 6), (6, 7), (6, 8)])
- )
- # 0 - 1 - 2
- # ||
- # 3= 4 - 5
- # ||
- # 6- 7
- # |
- # 8
+ list_of_edges_nx = [(0, 1), (1, 2), (1, 4), (3, 4), (4, 5), (3, 6), (6, 7), (6, 8)]
+
+ # For this test we are not dealing with an RX-based Graph object
+ # that is derived from an NX-based Graph object, so the
+ # nx_to_rx_node_id_map can just be the identity map...
+ #
+ nx_to_rx_node_id_map = {node: node for node in range(9)}
+
+ tree_nx, tree_rx = create_graphs_from_nx_edges(9, list_of_edges_nx, nx_to_rx_node_id_map)
- populated_tree = PopulatedGraph(
- tree, {node: 1 for node in tree}, len(tree) / 2, 0.5
+ # 0 - 1 - 2
+ # |
+ # 4 - 3
+ # | |
+ # 5 6 - 7
+ # |
+ # 8
+
+ populated_tree_nx = PopulatedGraph(
+ tree_nx, {node: 1 for node in tree_nx}, len(tree_nx) / 2, 0.5
)
- cuts = find_balanced_edge_cuts_memoization(populated_tree)
- edges = set(tuple(sorted(cut.edge)) for cut in cuts)
- assert edges == {(1, 4), (3, 4), (3, 6)}
+ populated_tree_rx = PopulatedGraph(
+ tree_rx, {node: 1 for node in tree_rx}, len(tree_rx) / 2, 0.5
+ )
+
+ cuts_nx = find_balanced_edge_cuts_memoization(populated_tree_nx)
+ edges_nx = set(tuple(sorted(cut.edge)) for cut in cuts_nx)
+ assert edges_nx == {(1, 4), (3, 4), (3, 6)}
+
+ cuts_rx = find_balanced_edge_cuts_memoization(populated_tree_rx)
+ edges_rx = set(tuple(sorted(cut.edge)) for cut in cuts_rx)
+ assert edges_rx == {(1, 4), (3, 4), (3, 6)}
def test_no_balanced_cuts_memo_when_one_side_okay():
- tree = Graph.from_networkx(networkx.Graph([(0, 1), (1, 2), (2, 3), (3, 4)]))
+ list_of_edges_nx = [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ # For this test we are not dealing with an RX-based Graph object
+ # that is derived from an NX-based Graph object, so the
+ # nx_to_rx_node_id_map can just be the identity map...
+ #
+ nx_to_rx_node_id_map = {node: node for node in range(5)}
+
+ tree_nx, tree_rx = create_graphs_from_nx_edges(5, list_of_edges_nx, nx_to_rx_node_id_map)
+
+ # OK to use the same populations with both NX and RX Graphs
populations = {0: 4, 1: 4, 2: 3, 3: 3, 4: 3}
- populated_tree = PopulatedGraph(
- graph=tree, populations=populations, ideal_pop=10, epsilon=0.1
+ populated_tree_nx = PopulatedGraph(
+ graph=tree_nx, populations=populations, ideal_pop=10, epsilon=0.1
+ )
+ populated_tree_rx = PopulatedGraph(
+ graph=tree_rx, populations=populations, ideal_pop=10, epsilon=0.1
)
- cuts = find_balanced_edge_cuts_memoization(populated_tree)
- assert cuts == []
+ cuts_nx = find_balanced_edge_cuts_memoization(populated_tree_nx)
+ assert cuts_nx == []
+
+ cuts_rx = find_balanced_edge_cuts_memoization(populated_tree_rx)
+ assert cuts_rx == []
def test_prime_bound():
assert (
get_max_prime_factor_less_than(2024, 20) == 11
- and get_max_prime_factor_less_than(2024, 1) == None
+ and get_max_prime_factor_less_than(2024, 1) is None
and get_max_prime_factor_less_than(2024, 2000) == 23
- and get_max_prime_factor_less_than(2024, -1) == None
+ and get_max_prime_factor_less_than(2024, -1) is None
)
-
-
-def test_bipartition_tree_random_returns_a_subset_of_nodes(graph_with_pop):
- ideal_pop = sum(graph_with_pop.nodes[node]["pop"] for node in graph_with_pop) / 2
- result = bipartition_tree_random(graph_with_pop, "pop", ideal_pop, 0.25, 10)
- assert isinstance(result, frozenset)
- assert all(node in graph_with_pop.nodes for node in result)
-
-
-def test_bipartition_tree_random_returns_within_epsilon_of_target_pop(graph_with_pop):
- ideal_pop = sum(graph_with_pop.nodes[node]["pop"] for node in graph_with_pop) / 2
- epsilon = 0.25
- result = bipartition_tree_random(graph_with_pop, "pop", ideal_pop, epsilon, 10)
-
- part_pop = sum(graph_with_pop.nodes[node]["pop"] for node in result)
- assert abs(part_pop - ideal_pop) / ideal_pop < epsilon
diff --git a/tests/updaters/dbg.py b/tests/updaters/dbg.py
new file mode 100644
index 00000000..e4a4f815
--- /dev/null
+++ b/tests/updaters/dbg.py
@@ -0,0 +1,68 @@
+import math
+import random
+
+import networkx
+
+from gerrychain.graph import Graph
+from gerrychain.partition import Partition
+from gerrychain.updaters import Election
+
+random.seed(2018)
+
+
+def create_three_by_three_grid():
+ """Returns a graph that looks like this:
+ 0 1 2
+ 3 4 5
+ 6 7 8
+ """
+ nx_graph = networkx.Graph()
+ nx_graph.add_edges_from(
+ [
+ (0, 1),
+ (0, 3),
+ (1, 2),
+ (1, 4),
+ (2, 5),
+ (3, 4),
+ (3, 6),
+ (4, 5),
+ (4, 7),
+ (5, 8),
+ (6, 7),
+ (7, 8),
+ ]
+ )
+ return Graph.from_networkx(nx_graph)
+
+
+def random_assignment(graph, num_districts):
+ assignment = {node: random.choice(range(num_districts)) for node in graph.nodes}
+ # Make sure that there are cut edges:
+ while len(set(assignment.values())) == 1:
+ assignment = {node: random.choice(range(num_districts)) for node in graph.nodes}
+ return assignment
+
+
+def test_vote_proportion_returns_nan_if_total_votes_is_zero(three_by_three_grid):
+ election = Election("Mock Election", ["D", "R"], alias="election")
+ graph = three_by_three_grid
+
+ for node in graph.nodes:
+ for col in election.columns:
+ graph.node_data(node)[col] = 0
+
+ updaters = {"election": election}
+ assignment = random_assignment(graph, 3)
+
+ partition = Partition(graph, assignment, updaters)
+
+ assert all(
+ math.isnan(value)
+ for party_percents in partition["election"].percents_for_party.values()
+ for value in party_percents.values()
+ )
+
+
+three_by_three_grid = create_three_by_three_grid()
+test_vote_proportion_returns_nan_if_total_votes_is_zero(three_by_three_grid)
diff --git a/tests/updaters/test_cut_edges.py b/tests/updaters/test_cut_edges.py
index e6582f41..d613a04c 100644
--- a/tests/updaters/test_cut_edges.py
+++ b/tests/updaters/test_cut_edges.py
@@ -1,4 +1,5 @@
import functools
+import random
import pytest
@@ -7,7 +8,6 @@
from gerrychain.constraints import no_vanishing_districts, single_flip_contiguous
from gerrychain.grid import Grid
from gerrychain.updaters import cut_edges, cut_edges_by_part
-import random
random.seed(2018)
@@ -28,6 +28,17 @@ def invalid_cut_edges(partition):
return invalid
+def translate_flips_to_internal_node_ids(partition, flips):
+ # Translate flips into the internal_node_ids for the partition
+ internal_flips = {}
+ for original_nx_node_id, part in flips.items():
+ internal_node_id = partition.graph.internal_node_id_for_original_nx_node_id(
+ original_nx_node_id
+ )
+ internal_flips[internal_node_id] = part
+ return internal_flips
+
+
def test_cut_edges_doesnt_duplicate_edges_with_different_order_of_nodes(
three_by_three_grid,
):
@@ -39,10 +50,13 @@ def test_cut_edges_doesnt_duplicate_edges_with_different_order_of_nodes(
# 222 222
flip = {4: 2, 2: 1, 5: 1}
- new_partition = Partition(parent=partition, flips=flip)
+ internal_flips = translate_flips_to_internal_node_ids(partition, flip)
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
result = new_partition["cut_edges"]
+ # Verify that the same edge is not in the result twice (just in different node_id order)
for edge in result:
assert (edge[1], edge[0]) not in result
@@ -56,13 +70,16 @@ def test_cut_edges_can_handle_multiple_flips(three_by_three_grid):
# 222 222
flip = {4: 2, 2: 1, 5: 1}
- new_partition = Partition(parent=partition, flips=flip)
+ internal_flips = translate_flips_to_internal_node_ids(partition, flip)
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
result = new_partition["cut_edges"]
naive_cut_edges = {
- tuple(sorted(edge)) for edge in graph.edges if new_partition.crosses_parts(edge)
+ tuple(sorted(edge)) for edge in partition.graph.edges if new_partition.crosses_parts(edge)
}
+
assert result == naive_cut_edges
@@ -78,7 +95,9 @@ def test_cut_edges_by_part_doesnt_duplicate_edges_with_opposite_order_of_nodes(
# 222 222
flip = {4: 2, 2: 1, 5: 1}
- new_partition = Partition(parent=partition, flips=flip)
+ internal_flips = translate_flips_to_internal_node_ids(partition, flip)
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
result = new_partition["cut_edges_by_part"]
@@ -97,16 +116,16 @@ def test_cut_edges_by_part_gives_same_total_edges_as_naive_method(three_by_three
# 222 222
flip = {4: 2, 2: 1, 5: 1}
- new_partition = Partition(parent=partition, flips=flip)
+ internal_flips = translate_flips_to_internal_node_ids(partition, flip)
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
result = new_partition["cut_edges_by_part"]
naive_cut_edges = {
- tuple(sorted(edge)) for edge in graph.edges if new_partition.crosses_parts(edge)
+ tuple(sorted(edge)) for edge in partition.graph.edges if new_partition.crosses_parts(edge)
}
- assert naive_cut_edges == {
- tuple(sorted(edge)) for part in result for edge in result[part]
- }
+ assert naive_cut_edges == {tuple(sorted(edge)) for part in result for edge in result[part]}
def test_implementation_of_cut_edges_matches_naive_method(three_by_three_grid):
@@ -115,12 +134,14 @@ def test_implementation_of_cut_edges_matches_naive_method(three_by_three_grid):
partition = Partition(graph, assignment, {"cut_edges": cut_edges})
flip = {4: 2}
- new_partition = Partition(parent=partition, flips=flip)
+
+ internal_flips = translate_flips_to_internal_node_ids(partition, flip)
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
+
result = cut_edges(new_partition)
- naive_cut_edges = {
- edge for edge in graph.edges if new_partition.crosses_parts(edge)
- }
+ naive_cut_edges = {edge for edge in partition.graph.edges if new_partition.crosses_parts(edge)}
assert edge_set_equal(result, naive_cut_edges)
@@ -154,8 +175,6 @@ def test_cut_edges_matches_naive_cut_edges_at_every_step(proposal, number_of_ste
)
for state in chain:
- naive_cut_edges = {
- edge for edge in state.graph.edges if state.crosses_parts(edge)
- }
+ naive_cut_edges = {edge for edge in state.graph.edges if state.crosses_parts(edge)}
assert naive_cut_edges == state["cut_edges"]
diff --git a/tests/updaters/test_election.py b/tests/updaters/test_election.py
index cc6de98b..1f6ba97c 100644
--- a/tests/updaters/test_election.py
+++ b/tests/updaters/test_election.py
@@ -19,20 +19,26 @@ def mock_election():
[1, 2, 3, 4, 5],
)
+
def test_election_results_can_compute_mm(mock_election):
assert mock_election.mean_median() is not None
+
def test_election_results_can_compute_mt(mock_election):
assert mock_election.mean_thirdian() is not None
+
def test_election_results_can_compute_eg(mock_election):
assert mock_election.efficiency_gap() is not None
+
def test_election_results_can_copmute_bias(mock_election):
assert mock_election.partisan_bias() is not None
+
def test_election_results_can_compute_gini(mock_election):
assert mock_election.partisan_gini() is not None
+
def test_election_results_can_compute_percents(mock_election):
assert mock_election.percent("A") > 0
diff --git a/tests/updaters/test_perimeters.py b/tests/updaters/test_perimeters.py
index 05c1f156..0f215e49 100644
--- a/tests/updaters/test_perimeters.py
+++ b/tests/updaters/test_perimeters.py
@@ -8,8 +8,13 @@
def setup():
+
+ # Note that the node_ids of the NX graph for a Grid are (x, y) tuples giving each node's position - e.g., (2, 1) is the node at x=2, y=1
+
grid = Grid((4, 4), with_diagonals=False)
- flipped_grid = grid.flip({(2, 1): 3})
+
+ flipped_grid = grid.flip({(2, 1): 3}, use_original_nx_node_ids=True)
+
return grid, flipped_grid
@@ -30,29 +35,47 @@ def test_interior_perimeter_handles_flips_with_a_simple_grid():
def test_cut_edges_by_part_handles_flips_with_a_simple_grid():
+
+ # frm: TODO: Testing: Add a graphic here
+ #
+ # That will allow the person reading this code to make sense
+ # of what it does...
+ #
grid, flipped_grid = setup()
result = flipped_grid["cut_edges_by_part"]
- assert result[0] == {
+ # Translate internal edges so that they can be compared to the literals below
+ new_result = {}
+ for part, set_of_edges in result.items():
+ new_set_of_edges = set()
+ for edge in set_of_edges:
+ new_edge = (
+ flipped_grid.graph.original_nx_node_id_for_internal_node_id(edge[0]),
+ flipped_grid.graph.original_nx_node_id_for_internal_node_id(edge[1]),
+ )
+ new_set_of_edges.add(new_edge)
+ new_result[part] = new_set_of_edges
+
+ assert new_result[0] == {
((1, 0), (2, 0)),
((1, 1), (2, 1)),
((0, 1), (0, 2)),
((1, 1), (1, 2)),
}
- assert result[1] == {
+ assert new_result[1] == {
((1, 0), (2, 0)),
((2, 0), (2, 1)),
((2, 1), (3, 1)),
((3, 1), (3, 2)),
}
- assert result[2] == {
+ assert new_result[2] == {
((0, 1), (0, 2)),
((1, 1), (1, 2)),
((1, 2), (2, 2)),
((1, 3), (2, 3)),
}
- assert result[3] == {
+ assert new_result[3] == {
((1, 1), (2, 1)),
((2, 0), (2, 1)),
((2, 1), (3, 1)),
@@ -99,33 +122,29 @@ def test_perimeter_match_naive_perimeter_at_every_step():
def get_exterior_boundaries(partition):
graph_boundary = partition["boundary_nodes"]
exterior = defaultdict(lambda: 0)
- for node in graph_boundary:
- part = partition.assignment[node]
- exterior[part] += partition.graph.nodes[node]["boundary_perim"]
+ for node_id in graph_boundary:
+ part = partition.assignment[node_id]
+ exterior[part] += partition.graph.node_data(node_id)["boundary_perim"]
return exterior
def get_interior_boundaries(partition):
- cut_edges = {
- edge for edge in partition.graph.edges if partition.crosses_parts(edge)
- }
+ cut_edges = {edge for edge in partition.graph.edges if partition.crosses_parts(edge)}
interior = defaultdict(int)
for edge in cut_edges:
for node in edge:
- interior[partition.assignment[node]] += partition.graph.edges[edge][
- "shared_perim"
- ]
+ interior[partition.assignment[node]] += partition.graph.edge_data(
+ partition.graph.get_edge_id_from_edge(edge)
+ )["shared_perim"]
return interior
def expected_perimeter(partition):
interior_boundaries = get_interior_boundaries(partition)
exterior_boundaries = get_exterior_boundaries(partition)
expected = {
- part: interior_boundaries[part] + exterior_boundaries[part]
- for part in partition.parts
+ part: interior_boundaries[part] + exterior_boundaries[part] for part in partition.parts
}
return expected
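+
+ # i.e., for each part: perimeter = (sum of "boundary_perim" over the part's
+ # boundary nodes) + (sum of "shared_perim" over its cut edges)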
for state in chain:
expected = expected_perimeter(state)
assert expected == state["perimeter"]
-
diff --git a/tests/updaters/test_spanning_trees.py b/tests/updaters/test_spanning_trees.py
index 0bdf4ec0..2d265e51 100644
--- a/tests/updaters/test_spanning_trees.py
+++ b/tests/updaters/test_spanning_trees.py
@@ -5,9 +5,7 @@
def test_get_num_spanning_trees(three_by_three_grid):
assignment = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1}
partition = Partition(
- three_by_three_grid,
- assignment,
- {"num_spanning_trees": num_spanning_trees}
+ three_by_three_grid, assignment, {"num_spanning_trees": num_spanning_trees}
)
assert 192 == round(partition["num_spanning_trees"][1])
assert [1] == list(partition["num_spanning_trees"].keys())
diff --git a/tests/updaters/test_split_scores.py b/tests/updaters/test_split_scores.py
index c26a32de..52868067 100644
--- a/tests/updaters/test_split_scores.py
+++ b/tests/updaters/test_split_scores.py
@@ -1,9 +1,10 @@
+import networkx
import pytest
-from gerrychain import Partition
-from gerrychain.updaters.locality_split_scores import LocalitySplits
+from gerrychain import Graph, Partition
from gerrychain.updaters.cut_edges import cut_edges
-from gerrychain import Graph
+from gerrychain.updaters.locality_split_scores import LocalitySplits
+
@pytest.fixture
def three_by_three_grid():
@@ -12,8 +13,8 @@ def three_by_three_grid():
3 4 5
6 7 8
"""
- graph = Graph()
- graph.add_edges_from(
+ nx_graph = networkx.Graph()
+ nx_graph.add_edges_from(
[
(0, 1),
(0, 3),
@@ -29,20 +30,21 @@ def three_by_three_grid():
(7, 8),
]
)
+ graph = Graph.from_networkx(nx_graph)
return graph
@pytest.fixture
def graph_with_counties(three_by_three_grid):
for node in [0, 1, 2]:
- three_by_three_grid.nodes[node]["county"] = "a"
- three_by_three_grid.nodes[node]["pop"] = 1
+ three_by_three_grid.node_data(node)["county"] = "a"
+ three_by_three_grid.node_data(node)["pop"] = 1
for node in [3, 4, 5]:
- three_by_three_grid.nodes[node]["county"] = "b"
- three_by_three_grid.nodes[node]["pop"] = 1
+ three_by_three_grid.node_data(node)["county"] = "b"
+ three_by_three_grid.node_data(node)["pop"] = 1
for node in [6, 7, 8]:
- three_by_three_grid.nodes[node]["county"] = "c"
- three_by_three_grid.nodes[node]["pop"] = 1
+ three_by_three_grid.node_data(node)["county"] = "c"
+ three_by_three_grid.node_data(node)["pop"] = 1
return three_by_three_grid
@@ -51,9 +53,23 @@ def partition(graph_with_counties):
partition = Partition(
graph_with_counties,
assignment={0: 1, 1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 3, 7: 3, 8: 3},
- updaters={"cut_edges":cut_edges, "splits": LocalitySplits("splittings", "county", "pop", ['num_parts', 'num_pieces',
- 'naked_boundary', 'shannon_entropy', 'power_entropy',
- 'symmetric_entropy', 'num_split_localities'])},
+ updaters={
+ "cut_edges": cut_edges,
+ "splits": LocalitySplits(
+ "splittings",
+ "county",
+ "pop",
+ [
+ "num_parts",
+ "num_pieces",
+ "naked_boundary",
+ "shannon_entropy",
+ "power_entropy",
+ "symmetric_entropy",
+ "num_split_localities",
+ ],
+ ),
+ },
)
return partition
@@ -63,22 +79,31 @@ def split_partition(graph_with_counties):
partition = Partition(
graph_with_counties,
assignment={0: 1, 1: 2, 2: 3, 3: 1, 4: 2, 5: 3, 6: 1, 7: 2, 8: 3},
- updaters={"cut_edges":cut_edges, "splits": LocalitySplits("splittings", "county", "pop", ['num_parts', 'num_pieces',
- 'naked_boundary', 'shannon_entropy', 'power_entropy',
- 'symmetric_entropy', 'num_split_localities'])},
+ updaters={
+ "cut_edges": cut_edges,
+ "splits": LocalitySplits(
+ "splittings",
+ "county",
+ "pop",
+ [
+ "num_parts",
+ "num_pieces",
+ "naked_boundary",
+ "shannon_entropy",
+ "power_entropy",
+ "symmetric_entropy",
+ "num_split_localities",
+ ],
+ ),
+ },
)
return partition
-
-
-
-
class TestSplittingScores:
-
-
+
def test_not_split(self, partition):
- part = partition.updaters["splits"](partition)
+ _ = partition.updaters["splits"](partition)
result = partition.updaters["splits"].scores
assert result["num_parts"] == 3
@@ -90,18 +115,13 @@ def test_not_split(self, partition):
assert result["num_split_localities"] == 0
def test_is_split(self, split_partition):
- part = split_partition.updaters["splits"](split_partition)
+ _ = split_partition.updaters["splits"](split_partition)
result = split_partition.updaters["splits"].scores
assert result["num_parts"] == 9
assert result["num_pieces"] == 9
assert result["naked_boundary"] == 6
assert 1.2 > result["shannon_entropy"] > 1
- assert .6 > result["power_entropy"] > .5
- assert 32 > result["symmetric_entropy"] > 31
+ assert 0.6 > result["power_entropy"] > 0.5
+ assert 32 > result["symmetric_entropy"] > 31
assert result["num_split_localities"] == 3
-
-
-
-
-
diff --git a/tests/updaters/test_splits.py b/tests/updaters/test_splits.py
index 1b6c26fa..2d7407c8 100644
--- a/tests/updaters/test_splits.py
+++ b/tests/updaters/test_splits.py
@@ -1,19 +1,21 @@
import pytest
from gerrychain import Partition
-from gerrychain.updaters.county_splits import (CountySplit,
- compute_county_splits,
- county_splits)
+from gerrychain.updaters.county_splits import (
+ CountySplit,
+ compute_county_splits,
+ county_splits,
+)
@pytest.fixture
def graph_with_counties(three_by_three_grid):
for node in [0, 1, 2]:
- three_by_three_grid.nodes[node]["county"] = "a"
+ three_by_three_grid.node_data(node)["county"] = "a"
for node in [3, 4, 5]:
- three_by_three_grid.nodes[node]["county"] = "b"
+ three_by_three_grid.node_data(node)["county"] = "b"
for node in [6, 7, 8]:
- three_by_three_grid.nodes[node]["county"] = "c"
+ three_by_three_grid.node_data(node)["county"] = "c"
return three_by_three_grid
@@ -43,12 +45,14 @@ def test_describes_splits_for_all_counties(self, partition):
assert set(result.keys()) == {"a", "b", "c"}
- after_a_flip = partition.flip({3: 1})
+ after_a_flip = partition.flip({3: 1}, use_original_nx_node_ids=True)
second_result = after_a_flip["splits"]
assert set(second_result.keys()) == {"a", "b", "c"}
def test_no_splits(self, graph_with_counties):
+
+ # frm: TODO: Testing: Why does this not just use "split_partition"? Isn't it the same?
partition = Partition(graph_with_counties, assignment="county")
result = compute_county_splits(partition, "county", None)
@@ -57,7 +61,9 @@ def test_no_splits(self, graph_with_counties):
assert splits_info.split == CountySplit.NOT_SPLIT
def test_new_split(self, partition):
- after_a_flip = partition.flip({3: 1})
+ # Do a flip, using the node_ids of the original assignment (rather than the
+ # node_ids used internally in the RX-based graph)
+ after_a_flip = partition.flip({3: 1}, use_original_nx_node_ids=True)
result = after_a_flip["splits"]
# County b is now split, but a and c are not
@@ -74,7 +80,9 @@ def test_initial_split(self, split_partition):
assert result["c"].split == CountySplit.NOT_SPLIT
def test_old_split(self, split_partition):
- after_a_flip = split_partition.flip({4: 1})
+ # Do a flip, using the node_ids of the original assignment (rather than the
+ # node_ids used internally in the RX-based graph)
+ after_a_flip = split_partition.flip({4: 1}, use_original_nx_node_ids=True)
result = after_a_flip["splits"]
# County b becomes more split
@@ -87,11 +95,11 @@ def test_old_split(self, split_partition):
"previous partition, which is not the intuitive behavior."
)
def test_initial_split_that_disappears_and_comes_back(self, split_partition):
- no_splits = split_partition.flip({3: 2})
+ no_splits = split_partition.flip({3: 2}, use_original_nx_node_ids=True)
result = no_splits["splits"]
assert all(info.split == CountySplit.NOT_SPLIT for info in result.values())
- split_comes_back = no_splits.flip({3: 1})
+ split_comes_back = no_splits.flip({3: 1}, use_original_nx_node_ids=True)
new_result = split_comes_back["splits"]
assert new_result["a"].split == CountySplit.NOT_SPLIT
assert new_result["b"].split == CountySplit.OLD_SPLIT
diff --git a/tests/updaters/test_updaters.py b/tests/updaters/test_updaters.py
index 37a4b97e..8c0d9ef7 100644
--- a/tests/updaters/test_updaters.py
+++ b/tests/updaters/test_updaters.py
@@ -1,4 +1,5 @@
import math
+import random
import networkx
import pytest
@@ -8,14 +9,22 @@
from gerrychain.graph import Graph
from gerrychain.partition import Partition
from gerrychain.proposals import propose_random_flip
-import random
-from gerrychain.updaters import (Election, Tally, boundary_nodes, cut_edges,
- cut_edges_by_part, exterior_boundaries,
- exterior_boundaries_as_a_set,
- interior_boundaries, perimeter)
+from gerrychain.updaters import (
+ Election,
+ Tally,
+ boundary_nodes,
+ cut_edges,
+ cut_edges_by_part,
+ exterior_boundaries,
+ exterior_boundaries_as_a_set,
+ interior_boundaries,
+ perimeter,
+)
from gerrychain.updaters.election import ElectionResults
+
random.seed(2018)
+
@pytest.fixture
def graph_with_d_and_r_cols(graph_with_random_data_factory):
return graph_with_random_data_factory(["D", "R"])
@@ -33,11 +42,10 @@ def random_assignment(graph, num_districts):
def partition_with_election(graph_with_d_and_r_cols):
graph = graph_with_d_and_r_cols
assignment = random_assignment(graph, 3)
- parties_to_columns = {
- "D": {node: graph.nodes[node]["D"] for node in graph.nodes},
- "R": {node: graph.nodes[node]["R"] for node in graph.nodes},
- }
- election = Election("Mock Election", parties_to_columns)
+
+ party_names_to_node_attribute_names = ["D", "R"]
+
+ election = Election("Mock Election", party_names_to_node_attribute_names)
updaters = {"Mock Election": election, "cut_edges": cut_edges}
return Partition(graph, assignment, updaters)
@@ -54,24 +62,36 @@ def chain_with_election(partition_with_election):
def test_Partition_can_update_stats():
- graph = networkx.complete_graph(3)
+ nx_graph = networkx.complete_graph(3)
assignment = {0: 1, 1: 1, 2: 2}
- graph.nodes[0]["stat"] = 1
- graph.nodes[1]["stat"] = 2
- graph.nodes[2]["stat"] = 3
+ nx_graph.nodes[0]["stat"] = 1
+ nx_graph.nodes[1]["stat"] = 2
+ nx_graph.nodes[2]["stat"] = 7
+
+ graph = Graph.from_networkx(nx_graph)
updaters = {"total_stat": Tally("stat", alias="total_stat")}
- partition = Partition(Graph.from_networkx(graph), assignment, updaters)
- assert partition["total_stat"][2] == 3
+ # This test is complicated by the fact that "original" node_ids are typically based
+ # on the node_ids for NX-based graphs, so in this test's case those would be 0, 1, and 2.
+ # However, when we create a Partition, we convert to an RX-based graph object and
+ # as a result the internal node_ids for the RX-based graph change. So, when we ask
+ # for graph data from a partition we need to be careful to use its internal node_ids.
+
+ # Verify that the "total_stat" for the part (district) 2 is 7
+ partition = Partition(graph, assignment, updaters)
+ assert partition["total_stat"][2] == 7
+
+ # Flip node with original node_id of 1 to be in part (district) 2
flip = {1: 2}
- new_partition = partition.flip(flip)
- assert new_partition["total_stat"][2] == 5
+ new_partition = partition.flip(flip, use_original_nx_node_ids=True)
+
+ assert new_partition["total_stat"][2] == 9
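+
+ # An equivalent manual route (a sketch): translate the original node_id to
+ # the partition graph's internal node_id first, then flip without the
+ # use_original_nx_node_ids flag:
+ #
+ #     internal_id = partition.graph.internal_node_id_for_original_nx_node_id(1)
+ #     new_partition = partition.flip({internal_id: 2})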
-def test_tally_multiple_columns(graph_with_d_and_r_cols):
+def test_tally_multiple_node_attribute_names(graph_with_d_and_r_cols):
graph = graph_with_d_and_r_cols
updaters = {"total": Tally(["D", "R"], alias="total")}
@@ -79,7 +99,7 @@ def test_tally_multiple_columns(graph_with_d_and_r_cols):
partition = Partition(graph, assignment, updaters)
expected_total_in_district_one = sum(
- graph.nodes[i]["D"] + graph.nodes[i]["R"] for i in range(4)
+ graph.node_data(i)["D"] + graph.node_data(i)["R"] for i in range(4)
)
assert partition["total"][1] == expected_total_in_district_one
@@ -103,12 +123,13 @@ def test_vote_proportion_updater_returns_percentage_or_nan(partition_with_electi
def test_vote_proportion_returns_nan_if_total_votes_is_zero(three_by_three_grid):
+
election = Election("Mock Election", ["D", "R"], alias="election")
graph = three_by_three_grid
for node in graph.nodes:
- for col in election.columns:
- graph.nodes[node][col] = 0
+ for col in election.node_attribute_names:
+ graph.node_data(node)[col] = 0
updaters = {"election": election}
assignment = random_assignment(graph, 3)
@@ -127,7 +148,7 @@ def is_percentage_or_nan(value):
def test_vote_proportion_updater_returns_percentage_or_nan_on_later_steps(
- chain_with_election
+ chain_with_election,
):
for partition in chain_with_election:
election_view = partition["Mock Election"]
@@ -156,9 +177,7 @@ def test_vote_proportions_sum_to_one(partition_with_election):
def test_election_result_has_a_cute_str_method():
- election = Election(
- "2008 Presidential", {"Democratic": [3, 1, 2], "Republican": [1, 2, 1]}
- )
+ election = Election("2008 Presidential", {"Democratic": [3, 1, 2], "Republican": [1, 2, 1]})
results = ElectionResults(
election,
{"Democratic": {0: 3, 1: 1, 2: 2}, "Republican": {0: 1, 1: 2, 2: 1}},
@@ -179,12 +198,46 @@ def test_election_result_has_a_cute_str_method():
assert str(results) == expected
+def _convert_dict_of_set_of_rx_node_ids_to_set_of_nx_node_ids(
+ dict_of_set_of_rx_nodes, nx_to_rx_node_id_map
+):
+
+ # frm: TODO: Testing: This way to convert node_ids is clumsy and inconvenient. Think of something better...
+
+ # When we create a partition from an NX-based Graph we convert it to an
+ # RX-based Graph, which changes the node_ids of the graph. To convert
+ # sets of RX-based node_ids back to the node_ids of the original NX
+ # graph, we can use the nx_to_rx_node_id_map that is generated and saved
+ # when the NX-based graph is converted to RX.
+ #
+ # This routine converts the data that some updaters create - namely a mapping
+ # from parts to sets of node_ids.
+
+ converted_set = {}
+ if nx_to_rx_node_id_map is not None:  # means graph was converted from NX
+ # reverse the map so we can look up rx_node_id -> nx_node_id
+ rx_to_nx_node_id_map = {value: key for key, value in nx_to_rx_node_id_map.items()}
+ for part, set_of_rx_nodes in dict_of_set_of_rx_nodes.items():
+ converted_set[part] = {
+ rx_to_nx_node_id_map[rx_node_id] for rx_node_id in set_of_rx_nodes
+ }
+ return converted_set
+
+
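+# A minimal sketch of the round trip this helper performs (hypothetical ids;
+# nx_to_rx maps nx_node_id -> rx_node_id):
+#
+#     nx_to_rx = {10: 0, 11: 1}
+#     by_part = {1: {0}, 2: {1}}   # part -> set of rx node_ids
+#     _convert_dict_of_set_of_rx_node_ids_to_set_of_nx_node_ids(by_part, nx_to_rx)
+#     # -> {1: {10}, 2: {11}}
+
+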
def test_exterior_boundaries_as_a_set(three_by_three_grid):
graph = three_by_three_grid
for i in [0, 1, 2, 3, 5, 6, 7, 8]:
- graph.nodes[i]["boundary_node"] = True
- graph.nodes[4]["boundary_node"] = False
+ graph.node_data(i)["boundary_node"] = True
+ graph.node_data(4)["boundary_node"] = False
assignment = {0: 1, 1: 1, 2: 2, 3: 1, 4: 1, 5: 2, 6: 2, 7: 2, 8: 2}
updaters = {
@@ -194,27 +247,65 @@ def test_exterior_boundaries_as_a_set(three_by_three_grid):
partition = Partition(graph, assignment, updaters)
result = partition["exterior_boundaries_as_a_set"]
- assert result[1] == {0, 1, 3} and result[2] == {2, 5, 6, 7, 8}
- # 112 111
- # 112 -> 121
- # 222 222
- flips = {4: 2, 2: 1, 5: 1}
+ # frm: TODO: Testing: Come up with a nice way to convert the result which uses
+ # RX based node_ids back to the original NX based node_ids...
+
+ # If the original graph that the partition was based on was an NX graph
+ # then we need to convert the RX node_ids in the partition's graph
+ # back to what they were in the NX graph.
+ nx_to_rx_node_id_map = partition.graph.get_nx_to_rx_node_id_map()
+ if nx_to_rx_node_id_map is not None:
+ converted_result = _convert_dict_of_set_of_rx_node_ids_to_set_of_nx_node_ids(
+ result, nx_to_rx_node_id_map
+ )
+ result = converted_result
+
+ assert result[1] == {0, 1, 3} and result[2] == {2, 5, 6, 7, 8}
- new_partition = Partition(parent=partition, flips=flips)
+ # Flip nodes and then recompute partition
+ # boundaries to make sure the updater works properly.
+ # The new partition map will look like this:
+ #
+ # 112 111
+ # 112 -> 121
+ # 222 222
+ #
+ # In terms of the original NX graph's node_ids, we would
+ # do the following flips: 4->2, 2->1, and 5->1
+ #
+ # However, the node_ids in the partition's graph have changed due to
+ # conversion to RX, so we need to translate the flips into RX node_ids
+
+ nx_flips = {4: 2, 2: 1, 5: 1}
+ # nx_to_rx_node_id_map maps nx_node_id -> rx_node_id, which is exactly the
+ # direction needed here
+ rx_flips = {nx_to_rx_node_id_map[nx_node_id]: part for nx_node_id, part in nx_flips.items()}
+
+ new_partition = Partition(parent=partition, flips=rx_flips)
result = new_partition["exterior_boundaries_as_a_set"]
+ # If the original graph that the partition was based on was an NX graph
+ # then we need to convert the RX node_ids in the partition's graph
+ # back to what they were in the NX graph.
+ nx_to_rx_node_id_map = new_partition.graph.get_nx_to_rx_node_id_map()
+ if nx_to_rx_node_id_map is not None:
+ converted_result = _convert_dict_of_set_of_rx_node_ids_to_set_of_nx_node_ids(
+ result, nx_to_rx_node_id_map
+ )
+ result = converted_result
+
assert result[1] == {0, 1, 2, 3, 5} and result[2] == {6, 7, 8}
def test_exterior_boundaries(three_by_three_grid):
+
graph = three_by_three_grid
for i in [0, 1, 2, 3, 5, 6, 7, 8]:
- graph.nodes[i]["boundary_node"] = True
- graph.nodes[i]["boundary_perim"] = 2
- graph.nodes[4]["boundary_node"] = False
+ graph.node_data(i)["boundary_node"] = True
+ graph.node_data(i)["boundary_perim"] = 2
+ graph.node_data(4)["boundary_node"] = False
assignment = {0: 1, 1: 1, 2: 2, 3: 1, 4: 1, 5: 2, 6: 2, 7: 2, 8: 2}
updaters = {
@@ -231,7 +322,13 @@ def test_exterior_boundaries(three_by_three_grid):
# 222 222
flips = {4: 2, 2: 1, 5: 1}
- new_partition = Partition(parent=partition, flips=flips)
+ # Convert the flips into internal node_ids
+ internal_flips = {}
+ for node_id, part in flips.items():
+ internal_node_id = partition.graph.internal_node_id_for_original_nx_node_id(node_id)
+ internal_flips[internal_node_id] = part
+
+ new_partition = Partition(parent=partition, flips=internal_flips)
result = new_partition["exterior_boundaries"]
@@ -241,12 +338,13 @@ def test_exterior_boundaries(three_by_three_grid):
def test_perimeter(three_by_three_grid):
graph = three_by_three_grid
for i in [0, 1, 2, 3, 5, 6, 7, 8]:
- graph.nodes[i]["boundary_node"] = True
- graph.nodes[i]["boundary_perim"] = 1
- graph.nodes[4]["boundary_node"] = False
+ graph.node_data(i)["boundary_node"] = True
+ # frm: TODO: Testing: Update test - boundary_perim should be 2 for corner nodes...
+ graph.node_data(i)["boundary_perim"] = 1
+ graph.node_data(4)["boundary_node"] = False
for edge in graph.edges:
- graph.edges[edge]["shared_perim"] = 1
+ graph.edge_data(edge)["shared_perim"] = 1
assignment = {0: 1, 1: 1, 2: 2, 3: 1, 4: 1, 5: 2, 6: 2, 7: 2, 8: 2}
updaters = {
@@ -275,6 +373,7 @@ def reject_half_of_all_flips(partition):
def test_elections_match_the_naive_computation(partition_with_election):
+
chain = MarkovChain(
propose_random_flip,
Validator([no_vanishing_districts, reject_half_of_all_flips]),
@@ -292,8 +391,8 @@ def test_elections_match_the_naive_computation(partition_with_election):
assert expected_party_totals == election_view.totals_for_party
-def expected_tally(partition, column):
+def expected_tally(partition, node_attribute_name):
return {
- part: sum(partition.graph.nodes[node][column] for node in nodes)
+ part: sum(partition.graph.node_data(node)[node_attribute_name] for node in nodes)
for part, nodes in partition.parts.items()
}
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 00000000..a85491d8
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,2353 @@
+version = 1
+revision = 2
+requires-python = ">=3.11"
+resolution-markers = [
+ "python_full_version >= '3.12'",
+ "python_full_version < '3.12'",
+]
+
+[[package]]
+name = "alabaster"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" },
+]
+
+[[package]]
+name = "appnope"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" },
+]
+
+[[package]]
+name = "asttokens"
+version = "3.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
+]
+
+[[package]]
+name = "autopep8"
+version = "2.3.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycodestyle" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/50/d8/30873d2b7b57dee9263e53d142da044c4600a46f2d28374b3e38b023df16/autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", size = 92210, upload-time = "2025-01-14T14:46:18.454Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" },
+]
+
+[[package]]
+name = "babel"
+version = "2.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
+]
+
+[[package]]
+name = "black"
+version = "25.12.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "click" },
+ { name = "mypy-extensions" },
+ { name = "packaging" },
+ { name = "pathspec" },
+ { name = "platformdirs" },
+ { name = "pytokens" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/60/ad/7ac0d0e1e0612788dbc48e62aef8a8e8feffac7eb3d787db4e43b8462fa8/black-25.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0cfa263e85caea2cff57d8f917f9f51adae8e20b610e2b23de35b5b11ce691a", size = 1877003, upload-time = "2025-12-08T01:43:29.967Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/dd/a237e9f565f3617a88b49284b59cbca2a4f56ebe68676c1aad0ce36a54a7/black-25.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a2f578ae20c19c50a382286ba78bfbeafdf788579b053d8e4980afb079ab9be", size = 1712639, upload-time = "2025-12-08T01:52:46.756Z" },
+ { url = "https://files.pythonhosted.org/packages/12/80/e187079df1ea4c12a0c63282ddd8b81d5107db6d642f7d7b75a6bcd6fc21/black-25.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e1b65634b0e471d07ff86ec338819e2ef860689859ef4501ab7ac290431f9b", size = 1758143, upload-time = "2025-12-08T01:45:29.137Z" },
+ { url = "https://files.pythonhosted.org/packages/93/b5/3096ccee4f29dc2c3aac57274326c4d2d929a77e629f695f544e159bfae4/black-25.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a3fa71e3b8dd9f7c6ac4d818345237dfb4175ed3bf37cd5a581dbc4c034f1ec5", size = 1420698, upload-time = "2025-12-08T01:45:53.379Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/39/f81c0ffbc25ffbe61c7d0385bf277e62ffc3e52f5ee668d7369d9854fadf/black-25.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:51e267458f7e650afed8445dc7edb3187143003d52a1b710c7321aef22aa9655", size = 1229317, upload-time = "2025-12-08T01:46:35.606Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/bd/26083f805115db17fda9877b3c7321d08c647df39d0df4c4ca8f8450593e/black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a", size = 1924178, upload-time = "2025-12-08T01:49:51.048Z" },
+ { url = "https://files.pythonhosted.org/packages/89/6b/ea00d6651561e2bdd9231c4177f4f2ae19cc13a0b0574f47602a7519b6ca/black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783", size = 1742643, upload-time = "2025-12-08T01:49:59.09Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/f3/360fa4182e36e9875fabcf3a9717db9d27a8d11870f21cff97725c54f35b/black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59", size = 1800158, upload-time = "2025-12-08T01:44:27.301Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/08/2c64830cb6616278067e040acca21d4f79727b23077633953081c9445d61/black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892", size = 1426197, upload-time = "2025-12-08T01:45:51.198Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/60/a93f55fd9b9816b7432cf6842f0e3000fdd5b7869492a04b9011a133ee37/black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43", size = 1237266, upload-time = "2025-12-08T01:45:10.556Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/52/c551e36bc95495d2aa1a37d50566267aa47608c81a53f91daa809e03293f/black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5", size = 1923809, upload-time = "2025-12-08T01:46:55.126Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/f7/aac9b014140ee56d247e707af8db0aae2e9efc28d4a8aba92d0abd7ae9d1/black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f", size = 1742384, upload-time = "2025-12-08T01:49:37.022Z" },
+ { url = "https://files.pythonhosted.org/packages/74/98/38aaa018b2ab06a863974c12b14a6266badc192b20603a81b738c47e902e/black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf", size = 1798761, upload-time = "2025-12-08T01:46:05.386Z" },
+ { url = "https://files.pythonhosted.org/packages/16/3a/a8ac542125f61574a3f015b521ca83b47321ed19bb63fe6d7560f348bfe1/black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d", size = 1429180, upload-time = "2025-12-08T01:45:34.903Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/2d/bdc466a3db9145e946762d52cd55b1385509d9f9004fec1c97bdc8debbfb/black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce", size = 1239350, upload-time = "2025-12-08T01:46:09.458Z" },
+ { url = "https://files.pythonhosted.org/packages/35/46/1d8f2542210c502e2ae1060b2e09e47af6a5e5963cb78e22ec1a11170b28/black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5", size = 1917015, upload-time = "2025-12-08T01:53:27.987Z" },
+ { url = "https://files.pythonhosted.org/packages/41/37/68accadf977672beb8e2c64e080f568c74159c1aaa6414b4cd2aef2d7906/black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f", size = 1741830, upload-time = "2025-12-08T01:54:36.861Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/76/03608a9d8f0faad47a3af3a3c8c53af3367f6c0dd2d23a84710456c7ac56/black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f", size = 1791450, upload-time = "2025-12-08T01:44:52.581Z" },
+ { url = "https://files.pythonhosted.org/packages/06/99/b2a4bd7dfaea7964974f947e1c76d6886d65fe5d24f687df2d85406b2609/black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83", size = 1452042, upload-time = "2025-12-08T01:46:13.188Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/7c/d9825de75ae5dd7795d007681b752275ea85a1c5d83269b4b9c754c2aaab/black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b", size = 1267446, upload-time = "2025-12-08T01:46:14.497Z" },
+ { url = "https://files.pythonhosted.org/packages/68/11/21331aed19145a952ad28fca2756a1433ee9308079bd03bd898e903a2e53/black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828", size = 206191, upload-time = "2025-12-08T01:40:50.963Z" },
+]
+
+[[package]]
+name = "certifi"
+version = "2025.11.12"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pycparser", marker = "implementation_name != 'PyPy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" },
+ { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" },
+ { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
+ { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
+ { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
+ { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
+ { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
+ { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
+ { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
+ { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
+ { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
+ { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
+ { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
+ { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
+ { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
+ { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
+]
+
+[[package]]
+name = "cfgv"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" },
+ { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" },
+ { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" },
+ { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" },
+ { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" },
+ { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" },
+ { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" },
+ { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" },
+ { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
+ { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
+ { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
+ { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
+ { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
+ { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
+ { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
+ { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
+ { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
+ { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
+ { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
+ { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
+ { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
+ { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
+ { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
+ { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
+ { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
+ { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
+ { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
+[[package]]
+name = "comm"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" },
+]
+
+[[package]]
+name = "commonmark"
+version = "0.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/60/48/a60f593447e8f0894ebb7f6e6c1f25dafc5e89c5879fdc9360ae93ff83f0/commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60", size = 95764, upload-time = "2019-10-04T15:37:39.817Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b1/92/dfd892312d822f36c55366118b95d914e5f16de11044a27cf10a7d71bbbf/commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9", size = 51068, upload-time = "2019-10-04T15:37:37.674Z" },
+]
+
+[[package]]
+name = "contourpy"
+version = "1.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" },
+ { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" },
+ { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" },
+ { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" },
+ { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" },
+ { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" },
+ { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" },
+ { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" },
+ { url = "https://files.pythonhosted.org/packages/63/12/897aeebfb475b7748ea67b61e045accdfcf0d971f8a588b67108ed7f5512/contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8", size = 379536, upload-time = "2025-07-26T12:01:25.91Z" },
+ { url = "https://files.pythonhosted.org/packages/43/8a/a8c584b82deb248930ce069e71576fc09bd7174bbd35183b7943fb1064fd/contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea", size = 384397, upload-time = "2025-07-26T12:01:27.152Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/8f/ec6289987824b29529d0dfda0d74a07cec60e54b9c92f3c9da4c0ac732de/contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1", size = 362601, upload-time = "2025-07-26T12:01:28.808Z" },
+ { url = "https://files.pythonhosted.org/packages/05/0a/a3fe3be3ee2dceb3e615ebb4df97ae6f3828aa915d3e10549ce016302bd1/contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7", size = 1331288, upload-time = "2025-07-26T12:01:31.198Z" },
+ { url = "https://files.pythonhosted.org/packages/33/1d/acad9bd4e97f13f3e2b18a3977fe1b4a37ecf3d38d815333980c6c72e963/contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411", size = 1403386, upload-time = "2025-07-26T12:01:33.947Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/8f/5847f44a7fddf859704217a99a23a4f6417b10e5ab1256a179264561540e/contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69", size = 185018, upload-time = "2025-07-26T12:01:35.64Z" },
+ { url = "https://files.pythonhosted.org/packages/19/e8/6026ed58a64563186a9ee3f29f41261fd1828f527dd93d33b60feca63352/contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b", size = 226567, upload-time = "2025-07-26T12:01:36.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/e2/f05240d2c39a1ed228d8328a78b6f44cd695f7ef47beb3e684cf93604f86/contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc", size = 193655, upload-time = "2025-07-26T12:01:37.999Z" },
+ { url = "https://files.pythonhosted.org/packages/68/35/0167aad910bbdb9599272bd96d01a9ec6852f36b9455cf2ca67bd4cc2d23/contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5", size = 293257, upload-time = "2025-07-26T12:01:39.367Z" },
+ { url = "https://files.pythonhosted.org/packages/96/e4/7adcd9c8362745b2210728f209bfbcf7d91ba868a2c5f40d8b58f54c509b/contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1", size = 274034, upload-time = "2025-07-26T12:01:40.645Z" },
+ { url = "https://files.pythonhosted.org/packages/73/23/90e31ceeed1de63058a02cb04b12f2de4b40e3bef5e082a7c18d9c8ae281/contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286", size = 334672, upload-time = "2025-07-26T12:01:41.942Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/93/b43d8acbe67392e659e1d984700e79eb67e2acb2bd7f62012b583a7f1b55/contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5", size = 381234, upload-time = "2025-07-26T12:01:43.499Z" },
+ { url = "https://files.pythonhosted.org/packages/46/3b/bec82a3ea06f66711520f75a40c8fc0b113b2a75edb36aa633eb11c4f50f/contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67", size = 385169, upload-time = "2025-07-26T12:01:45.219Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/32/e0f13a1c5b0f8572d0ec6ae2f6c677b7991fafd95da523159c19eff0696a/contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9", size = 362859, upload-time = "2025-07-26T12:01:46.519Z" },
+ { url = "https://files.pythonhosted.org/packages/33/71/e2a7945b7de4e58af42d708a219f3b2f4cff7386e6b6ab0a0fa0033c49a9/contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659", size = 1332062, upload-time = "2025-07-26T12:01:48.964Z" },
+ { url = "https://files.pythonhosted.org/packages/12/fc/4e87ac754220ccc0e807284f88e943d6d43b43843614f0a8afa469801db0/contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7", size = 1403932, upload-time = "2025-07-26T12:01:51.979Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/2e/adc197a37443f934594112222ac1aa7dc9a98faf9c3842884df9a9d8751d/contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d", size = 185024, upload-time = "2025-07-26T12:01:53.245Z" },
+ { url = "https://files.pythonhosted.org/packages/18/0b/0098c214843213759692cc638fce7de5c289200a830e5035d1791d7a2338/contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263", size = 226578, upload-time = "2025-07-26T12:01:54.422Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/9a/2f6024a0c5995243cd63afdeb3651c984f0d2bc727fd98066d40e141ad73/contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9", size = 193524, upload-time = "2025-07-26T12:01:55.73Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/b3/f8a1a86bd3298513f500e5b1f5fd92b69896449f6cab6a146a5d52715479/contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d", size = 306730, upload-time = "2025-07-26T12:01:57.051Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/11/4780db94ae62fc0c2053909b65dc3246bd7cecfc4f8a20d957ad43aa4ad8/contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216", size = 287897, upload-time = "2025-07-26T12:01:58.663Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/15/e59f5f3ffdd6f3d4daa3e47114c53daabcb18574a26c21f03dc9e4e42ff0/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae", size = 326751, upload-time = "2025-07-26T12:02:00.343Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/81/03b45cfad088e4770b1dcf72ea78d3802d04200009fb364d18a493857210/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20", size = 375486, upload-time = "2025-07-26T12:02:02.128Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/ba/49923366492ffbdd4486e970d421b289a670ae8cf539c1ea9a09822b371a/contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99", size = 388106, upload-time = "2025-07-26T12:02:03.615Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/52/5b00ea89525f8f143651f9f03a0df371d3cbd2fccd21ca9b768c7a6500c2/contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b", size = 352548, upload-time = "2025-07-26T12:02:05.165Z" },
+ { url = "https://files.pythonhosted.org/packages/32/1d/a209ec1a3a3452d490f6b14dd92e72280c99ae3d1e73da74f8277d4ee08f/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a", size = 1322297, upload-time = "2025-07-26T12:02:07.379Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/9e/46f0e8ebdd884ca0e8877e46a3f4e633f6c9c8c4f3f6e72be3fe075994aa/contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e", size = 1391023, upload-time = "2025-07-26T12:02:10.171Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/70/f308384a3ae9cd2209e0849f33c913f658d3326900d0ff5d378d6a1422d2/contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3", size = 196157, upload-time = "2025-07-26T12:02:11.488Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/dd/880f890a6663b84d9e34a6f88cded89d78f0091e0045a284427cb6b18521/contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8", size = 240570, upload-time = "2025-07-26T12:02:12.754Z" },
+ { url = "https://files.pythonhosted.org/packages/80/99/2adc7d8ffead633234817ef8e9a87115c8a11927a94478f6bb3d3f4d4f7d/contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301", size = 199713, upload-time = "2025-07-26T12:02:14.4Z" },
+ { url = "https://files.pythonhosted.org/packages/72/8b/4546f3ab60f78c514ffb7d01a0bd743f90de36f0019d1be84d0a708a580a/contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a", size = 292189, upload-time = "2025-07-26T12:02:16.095Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/e1/3542a9cb596cadd76fcef413f19c79216e002623158befe6daa03dbfa88c/contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77", size = 273251, upload-time = "2025-07-26T12:02:17.524Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/71/f93e1e9471d189f79d0ce2497007731c1e6bf9ef6d1d61b911430c3db4e5/contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5", size = 335810, upload-time = "2025-07-26T12:02:18.9Z" },
+ { url = "https://files.pythonhosted.org/packages/91/f9/e35f4c1c93f9275d4e38681a80506b5510e9327350c51f8d4a5a724d178c/contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4", size = 382871, upload-time = "2025-07-26T12:02:20.418Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/71/47b512f936f66a0a900d81c396a7e60d73419868fba959c61efed7a8ab46/contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36", size = 386264, upload-time = "2025-07-26T12:02:21.916Z" },
+ { url = "https://files.pythonhosted.org/packages/04/5f/9ff93450ba96b09c7c2b3f81c94de31c89f92292f1380261bd7195bea4ea/contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3", size = 363819, upload-time = "2025-07-26T12:02:23.759Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/a6/0b185d4cc480ee494945cde102cb0149ae830b5fa17bf855b95f2e70ad13/contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b", size = 1333650, upload-time = "2025-07-26T12:02:26.181Z" },
+ { url = "https://files.pythonhosted.org/packages/43/d7/afdc95580ca56f30fbcd3060250f66cedbde69b4547028863abd8aa3b47e/contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36", size = 1404833, upload-time = "2025-07-26T12:02:28.782Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/e2/366af18a6d386f41132a48f033cbd2102e9b0cf6345d35ff0826cd984566/contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d", size = 189692, upload-time = "2025-07-26T12:02:30.128Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/c2/57f54b03d0f22d4044b8afb9ca0e184f8b1afd57b4f735c2fa70883dc601/contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd", size = 232424, upload-time = "2025-07-26T12:02:31.395Z" },
+ { url = "https://files.pythonhosted.org/packages/18/79/a9416650df9b525737ab521aa181ccc42d56016d2123ddcb7b58e926a42c/contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339", size = 198300, upload-time = "2025-07-26T12:02:32.956Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/42/38c159a7d0f2b7b9c04c64ab317042bb6952b713ba875c1681529a2932fe/contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772", size = 306769, upload-time = "2025-07-26T12:02:34.2Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/6c/26a8205f24bca10974e77460de68d3d7c63e282e23782f1239f226fcae6f/contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77", size = 287892, upload-time = "2025-07-26T12:02:35.807Z" },
+ { url = "https://files.pythonhosted.org/packages/66/06/8a475c8ab718ebfd7925661747dbb3c3ee9c82ac834ccb3570be49d129f4/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13", size = 326748, upload-time = "2025-07-26T12:02:37.193Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/a3/c5ca9f010a44c223f098fccd8b158bb1cb287378a31ac141f04730dc49be/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe", size = 375554, upload-time = "2025-07-26T12:02:38.894Z" },
+ { url = "https://files.pythonhosted.org/packages/80/5b/68bd33ae63fac658a4145088c1e894405e07584a316738710b636c6d0333/contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f", size = 388118, upload-time = "2025-07-26T12:02:40.642Z" },
+ { url = "https://files.pythonhosted.org/packages/40/52/4c285a6435940ae25d7410a6c36bda5145839bc3f0beb20c707cda18b9d2/contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0", size = 352555, upload-time = "2025-07-26T12:02:42.25Z" },
+ { url = "https://files.pythonhosted.org/packages/24/ee/3e81e1dd174f5c7fefe50e85d0892de05ca4e26ef1c9a59c2a57e43b865a/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4", size = 1322295, upload-time = "2025-07-26T12:02:44.668Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/b2/6d913d4d04e14379de429057cd169e5e00f6c2af3bb13e1710bcbdb5da12/contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f", size = 1391027, upload-time = "2025-07-26T12:02:47.09Z" },
+ { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" },
+ { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" },
+]
+
+[[package]]
+name = "coverage"
+version = "7.13.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/dc/888bf90d8b1c3d0b4020a40e52b9f80957d75785931ec66c7dfaccc11c7d/coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820", size = 218104, upload-time = "2025-12-08T13:12:33.333Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/ea/069d51372ad9c380214e86717e40d1a743713a2af191cfba30a0911b0a4a/coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f", size = 218606, upload-time = "2025-12-08T13:12:34.498Z" },
+ { url = "https://files.pythonhosted.org/packages/68/09/77b1c3a66c2aa91141b6c4471af98e5b1ed9b9e6d17255da5eb7992299e3/coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96", size = 248999, upload-time = "2025-12-08T13:12:36.02Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/32/2e2f96e9d5691eaf1181d9040f850b8b7ce165ea10810fd8e2afa534cef7/coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259", size = 250925, upload-time = "2025-12-08T13:12:37.221Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/45/b88ddac1d7978859b9a39a8a50ab323186148f1d64bc068f86fc77706321/coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb", size = 253032, upload-time = "2025-12-08T13:12:38.763Z" },
+ { url = "https://files.pythonhosted.org/packages/71/cb/e15513f94c69d4820a34b6bf3d2b1f9f8755fa6021be97c7065442d7d653/coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9", size = 249134, upload-time = "2025-12-08T13:12:40.382Z" },
+ { url = "https://files.pythonhosted.org/packages/09/61/d960ff7dc9e902af3310ce632a875aaa7860f36d2bc8fc8b37ee7c1b82a5/coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030", size = 250731, upload-time = "2025-12-08T13:12:41.992Z" },
+ { url = "https://files.pythonhosted.org/packages/98/34/c7c72821794afc7c7c2da1db8f00c2c98353078aa7fb6b5ff36aac834b52/coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833", size = 248795, upload-time = "2025-12-08T13:12:43.331Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/5b/e0f07107987a43b2def9aa041c614ddb38064cbf294a71ef8c67d43a0cdd/coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8", size = 248514, upload-time = "2025-12-08T13:12:44.546Z" },
+ { url = "https://files.pythonhosted.org/packages/71/c2/c949c5d3b5e9fc6dd79e1b73cdb86a59ef14f3709b1d72bf7668ae12e000/coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753", size = 249424, upload-time = "2025-12-08T13:12:45.759Z" },
+ { url = "https://files.pythonhosted.org/packages/11/f1/bbc009abd6537cec0dffb2cc08c17a7f03de74c970e6302db4342a6e05af/coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b", size = 220597, upload-time = "2025-12-08T13:12:47.378Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/f6/d9977f2fb51c10fbaed0718ce3d0a8541185290b981f73b1d27276c12d91/coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe", size = 221536, upload-time = "2025-12-08T13:12:48.7Z" },
+ { url = "https://files.pythonhosted.org/packages/be/ad/3fcf43fd96fb43e337a3073dea63ff148dcc5c41ba7a14d4c7d34efb2216/coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7", size = 220206, upload-time = "2025-12-08T13:12:50.365Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/f1/2619559f17f31ba00fc40908efd1fbf1d0a5536eb75dc8341e7d660a08de/coverage-7.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0b3d67d31383c4c68e19a88e28fc4c2e29517580f1b0ebec4a069d502ce1e0bf", size = 218274, upload-time = "2025-12-08T13:12:52.095Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/11/30d71ae5d6e949ff93b2a79a2c1b4822e00423116c5c6edfaeef37301396/coverage-7.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:581f086833d24a22c89ae0fe2142cfaa1c92c930adf637ddf122d55083fb5a0f", size = 218638, upload-time = "2025-12-08T13:12:53.418Z" },
+ { url = "https://files.pythonhosted.org/packages/79/c2/fce80fc6ded8d77e53207489d6065d0fed75db8951457f9213776615e0f5/coverage-7.13.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0a3a30f0e257df382f5f9534d4ce3d4cf06eafaf5192beb1a7bd066cb10e78fb", size = 250129, upload-time = "2025-12-08T13:12:54.744Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/b6/51b5d1eb6fcbb9a1d5d6984e26cbe09018475c2922d554fd724dd0f056ee/coverage-7.13.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:583221913fbc8f53b88c42e8dbb8fca1d0f2e597cb190ce45916662b8b9d9621", size = 252885, upload-time = "2025-12-08T13:12:56.401Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/f8/972a5affea41de798691ab15d023d3530f9f56a72e12e243f35031846ff7/coverage-7.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f5d9bd30756fff3e7216491a0d6d520c448d5124d3d8e8f56446d6412499e74", size = 253974, upload-time = "2025-12-08T13:12:57.718Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/56/116513aee860b2c7968aa3506b0f59b22a959261d1dbf3aea7b4450a7520/coverage-7.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a23e5a1f8b982d56fa64f8e442e037f6ce29322f1f9e6c2344cd9e9f4407ee57", size = 250538, upload-time = "2025-12-08T13:12:59.254Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/75/074476d64248fbadf16dfafbf93fdcede389ec821f74ca858d7c87d2a98c/coverage-7.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b01c22bc74a7fb44066aaf765224c0d933ddf1f5047d6cdfe4795504a4493f8", size = 251912, upload-time = "2025-12-08T13:13:00.604Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/d2/aa4f8acd1f7c06024705c12609d8698c51b27e4d635d717cd1934c9668e2/coverage-7.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:898cce66d0836973f48dda4e3514d863d70142bdf6dfab932b9b6a90ea5b222d", size = 250054, upload-time = "2025-12-08T13:13:01.892Z" },
+ { url = "https://files.pythonhosted.org/packages/19/98/8df9e1af6a493b03694a1e8070e024e7d2cdc77adedc225a35e616d505de/coverage-7.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:3ab483ea0e251b5790c2aac03acde31bff0c736bf8a86829b89382b407cd1c3b", size = 249619, upload-time = "2025-12-08T13:13:03.236Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/71/f8679231f3353018ca66ef647fa6fe7b77e6bff7845be54ab84f86233363/coverage-7.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d84e91521c5e4cb6602fe11ece3e1de03b2760e14ae4fcf1a4b56fa3c801fcd", size = 251496, upload-time = "2025-12-08T13:13:04.511Z" },
+ { url = "https://files.pythonhosted.org/packages/04/86/9cb406388034eaf3c606c22094edbbb82eea1fa9d20c0e9efadff20d0733/coverage-7.13.0-cp312-cp312-win32.whl", hash = "sha256:193c3887285eec1dbdb3f2bd7fbc351d570ca9c02ca756c3afbc71b3c98af6ef", size = 220808, upload-time = "2025-12-08T13:13:06.422Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/59/af483673df6455795daf5f447c2f81a3d2fcfc893a22b8ace983791f6f34/coverage-7.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:4f3e223b2b2db5e0db0c2b97286aba0036ca000f06aca9b12112eaa9af3d92ae", size = 221616, upload-time = "2025-12-08T13:13:07.95Z" },
+ { url = "https://files.pythonhosted.org/packages/64/b0/959d582572b30a6830398c60dd419c1965ca4b5fb38ac6b7093a0d50ca8d/coverage-7.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:086cede306d96202e15a4b77ace8472e39d9f4e5f9fd92dd4fecdfb2313b2080", size = 220261, upload-time = "2025-12-08T13:13:09.581Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297, upload-time = "2025-12-08T13:13:10.977Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673, upload-time = "2025-12-08T13:13:12.562Z" },
+ { url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652, upload-time = "2025-12-08T13:13:13.909Z" },
+ { url = "https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251, upload-time = "2025-12-08T13:13:15.553Z" },
+ { url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492, upload-time = "2025-12-08T13:13:16.849Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850, upload-time = "2025-12-08T13:13:18.142Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633, upload-time = "2025-12-08T13:13:19.56Z" },
+ { url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586, upload-time = "2025-12-08T13:13:20.883Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412, upload-time = "2025-12-08T13:13:22.22Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191, upload-time = "2025-12-08T13:13:23.899Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829, upload-time = "2025-12-08T13:13:25.182Z" },
+ { url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640, upload-time = "2025-12-08T13:13:26.836Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269, upload-time = "2025-12-08T13:13:28.116Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990, upload-time = "2025-12-08T13:13:29.463Z" },
+ { url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340, upload-time = "2025-12-08T13:13:31.524Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638, upload-time = "2025-12-08T13:13:32.965Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705, upload-time = "2025-12-08T13:13:34.378Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125, upload-time = "2025-12-08T13:13:35.73Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844, upload-time = "2025-12-08T13:13:37.69Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700, upload-time = "2025-12-08T13:13:39.525Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321, upload-time = "2025-12-08T13:13:41.172Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222, upload-time = "2025-12-08T13:13:43.282Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411, upload-time = "2025-12-08T13:13:44.72Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505, upload-time = "2025-12-08T13:13:46.332Z" },
+ { url = "https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569, upload-time = "2025-12-08T13:13:47.79Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841, upload-time = "2025-12-08T13:13:49.243Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/4b/9b54bedda55421449811dcd5263a2798a63f48896c24dfb92b0f1b0845bd/coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256", size = 218343, upload-time = "2025-12-08T13:13:50.811Z" },
+ { url = "https://files.pythonhosted.org/packages/59/df/c3a1f34d4bba2e592c8979f924da4d3d4598b0df2392fbddb7761258e3dc/coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a", size = 218672, upload-time = "2025-12-08T13:13:52.284Z" },
+ { url = "https://files.pythonhosted.org/packages/07/62/eec0659e47857698645ff4e6ad02e30186eb8afd65214fd43f02a76537cb/coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9", size = 249715, upload-time = "2025-12-08T13:13:53.791Z" },
+ { url = "https://files.pythonhosted.org/packages/23/2d/3c7ff8b2e0e634c1f58d095f071f52ed3c23ff25be524b0ccae8b71f99f8/coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19", size = 252225, upload-time = "2025-12-08T13:13:55.274Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/ac/fb03b469d20e9c9a81093575003f959cf91a4a517b783aab090e4538764b/coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be", size = 253559, upload-time = "2025-12-08T13:13:57.161Z" },
+ { url = "https://files.pythonhosted.org/packages/29/62/14afa9e792383c66cc0a3b872a06ded6e4ed1079c7d35de274f11d27064e/coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb", size = 249724, upload-time = "2025-12-08T13:13:58.692Z" },
+ { url = "https://files.pythonhosted.org/packages/31/b7/333f3dab2939070613696ab3ee91738950f0467778c6e5a5052e840646b7/coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8", size = 251582, upload-time = "2025-12-08T13:14:00.642Z" },
+ { url = "https://files.pythonhosted.org/packages/81/cb/69162bda9381f39b2287265d7e29ee770f7c27c19f470164350a38318764/coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b", size = 249538, upload-time = "2025-12-08T13:14:02.556Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/76/350387b56a30f4970abe32b90b2a434f87d29f8b7d4ae40d2e8a85aacfb3/coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9", size = 249349, upload-time = "2025-12-08T13:14:04.015Z" },
+ { url = "https://files.pythonhosted.org/packages/86/0d/7f6c42b8d59f4c7e43ea3059f573c0dcfed98ba46eb43c68c69e52ae095c/coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927", size = 251011, upload-time = "2025-12-08T13:14:05.505Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/f1/4bb2dff379721bb0b5c649d5c5eaf438462cad824acf32eb1b7ca0c7078e/coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f", size = 221091, upload-time = "2025-12-08T13:14:07.127Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/44/c239da52f373ce379c194b0ee3bcc121020e397242b85f99e0afc8615066/coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc", size = 221904, upload-time = "2025-12-08T13:14:08.542Z" },
+ { url = "https://files.pythonhosted.org/packages/89/1f/b9f04016d2a29c2e4a0307baefefad1a4ec5724946a2b3e482690486cade/coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b", size = 220480, upload-time = "2025-12-08T13:14:10.958Z" },
+ { url = "https://files.pythonhosted.org/packages/16/d4/364a1439766c8e8647860584171c36010ca3226e6e45b1753b1b249c5161/coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28", size = 219074, upload-time = "2025-12-08T13:14:13.345Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/f4/71ba8be63351e099911051b2089662c03d5671437a0ec2171823c8e03bec/coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe", size = 219342, upload-time = "2025-12-08T13:14:15.02Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713, upload-time = "2025-12-08T13:14:16.907Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825, upload-time = "2025-12-08T13:14:18.68Z" },
+ { url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233, upload-time = "2025-12-08T13:14:20.55Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779, upload-time = "2025-12-08T13:14:22.367Z" },
+ { url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700, upload-time = "2025-12-08T13:14:24.309Z" },
+ { url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302, upload-time = "2025-12-08T13:14:26.068Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136, upload-time = "2025-12-08T13:14:27.604Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467, upload-time = "2025-12-08T13:14:29.09Z" },
+ { url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875, upload-time = "2025-12-08T13:14:31.106Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982, upload-time = "2025-12-08T13:14:33.1Z" },
+ { url = "https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016, upload-time = "2025-12-08T13:14:34.601Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" },
+]
+
+[package.optional-dependencies]
+toml = [
+ { name = "tomli", marker = "python_full_version <= '3.11'" },
+]
+
+[[package]]
+name = "cycler"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615, upload-time = "2023-10-07T05:32:18.335Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321, upload-time = "2023-10-07T05:32:16.783Z" },
+]
+
+[[package]]
+name = "debugpy"
+version = "1.8.18"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/62/1a/7cb5531840d7ba5d9329644109e62adee41f2f0083d9f8a4039f01de58cf/debugpy-1.8.18.tar.gz", hash = "sha256:02551b1b84a91faadd2db9bc4948873f2398190c95b3cc6f97dc706f43e8c433", size = 1644467, upload-time = "2025-12-10T19:48:07.236Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ac/72/93167809b44a8e6971a1ff0b3e956cca4832fd7e8e47ce7b2b16be95795a/debugpy-1.8.18-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:3dae1d65e581406a4d7c1bb44391f47e621b8c87c5639b6607e6007a5d823205", size = 2207588, upload-time = "2025-12-10T19:48:15.44Z" },
+ { url = "https://files.pythonhosted.org/packages/05/8b/0f5a54b239dac880ccc16e0b29fdecfb444635f2495cc3705548e24938ab/debugpy-1.8.18-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:8804d1288e6006629a87d53eb44b7b66e695d428ac529ffd75bfc7d730a9c821", size = 3170762, upload-time = "2025-12-10T19:48:17.192Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/e4/7631d0ecd102085aa1cf5eb38f50e00036dec2c4571f236d2189ed842ee3/debugpy-1.8.18-cp311-cp311-win32.whl", hash = "sha256:ded8a5a413bd0a249b3c0be9f43128f437755180ac431222a6354c7d76a76a54", size = 5158530, upload-time = "2025-12-10T19:48:18.701Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/51/97674a4af4dc960a4eb0882b6c41c111e6a0a79c6b275df202f392e751cb/debugpy-1.8.18-cp311-cp311-win_amd64.whl", hash = "sha256:df6c1243dedcb6bf9a5dc1c5668009e2b5508b8525f27d9821be91da57827743", size = 5182452, upload-time = "2025-12-10T19:48:20.328Z" },
+ { url = "https://files.pythonhosted.org/packages/83/01/439626e3572a33ac543f25bc1dac1e80bc01c7ce83f3c24dc4441302ca13/debugpy-1.8.18-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:530c38114725505a7e4ea95328dbc24aabb9be708c6570623c8163412e6d1d6b", size = 2549961, upload-time = "2025-12-10T19:48:21.73Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/73/1eeaa15c20a2b627be57a65bc1ebf2edd8d896950eac323588b127d776f2/debugpy-1.8.18-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:a114865099283cbed4c9330cb0c9cb7a04cfa92e803577843657302d526141ec", size = 4309855, upload-time = "2025-12-10T19:48:23.41Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/6f/2da8ded21ae55df7067e57bd7f67ffed7e08b634f29bdba30c03d3f19918/debugpy-1.8.18-cp312-cp312-win32.whl", hash = "sha256:4d26736dfabf404e9f3032015ec7b0189e7396d0664e29e5bdbe7ac453043c95", size = 5280577, upload-time = "2025-12-10T19:48:25.386Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/8e/ebe887218c5b84f9421de7eb7bb7cdf196e84535c3f504a562219297d755/debugpy-1.8.18-cp312-cp312-win_amd64.whl", hash = "sha256:7e68ba950acbcf95ee862210133681f408cbb78d1c9badbb515230ec55ed6487", size = 5322458, upload-time = "2025-12-10T19:48:28.049Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/3f/45af037e91e308274a092eb6a86282865fb1f11148cdb7616e811aae33d7/debugpy-1.8.18-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:75d14dd04b617ee38e46786394ec0dd5e1ac5e3d10ffb034fd6c7b72111174c2", size = 2538826, upload-time = "2025-12-10T19:48:29.434Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/f4/2de6bf624de05134d1bbe0a8750d484363cd212c3ade3d04f5c77d47d0ce/debugpy-1.8.18-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:1b224887af5121fa702f9f542968170d104e3f9cac827d85fdefe89702dc235c", size = 4292542, upload-time = "2025-12-10T19:48:30.836Z" },
+ { url = "https://files.pythonhosted.org/packages/93/54/89de7ef84d5ac39fc64a773feaedd902536cc5295814cd22d19c6d9dea35/debugpy-1.8.18-cp313-cp313-win32.whl", hash = "sha256:636a5445a3336e4aba323a3545ca2bb373b04b0bc14084a4eb20c989db44429f", size = 5280460, upload-time = "2025-12-10T19:48:32.696Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/59/651329e618406229edbef6508a5aa05e43cd027f042740c5b27e46854b23/debugpy-1.8.18-cp313-cp313-win_amd64.whl", hash = "sha256:6da217ac8c1152d698b9809484d50c75bef9cc02fd6886a893a6df81ec952ff8", size = 5322399, upload-time = "2025-12-10T19:48:35.057Z" },
+ { url = "https://files.pythonhosted.org/packages/36/59/5e8bf46a66ca9dfcd0ce4f35c07085aeb60d99bf5c52135973a4e197ed41/debugpy-1.8.18-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:be7f622d250fe3429571e84572eb771023f1da22c754f28d2c60a10d74a4cc1b", size = 2537336, upload-time = "2025-12-10T19:48:36.463Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/5a/3b37cc266a69da83a4febaa4267bb2062d4bec5287036e2f23d9a30a788c/debugpy-1.8.18-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:df8bf7cd78019d5d155213bf5a1818b36403d0c3758d669e76827d4db026b840", size = 4268696, upload-time = "2025-12-10T19:48:37.855Z" },
+ { url = "https://files.pythonhosted.org/packages/de/4b/1e13586444440e5754b70055449b70afa187aaa167fa4c20c0c05d9c3b80/debugpy-1.8.18-cp314-cp314-win32.whl", hash = "sha256:32dd56d50fe15c47d0f930a7f0b9d3e5eb8ed04770bc6c313fba6d226f87e1e8", size = 5280624, upload-time = "2025-12-10T19:48:39.28Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/21/f8c12baa16212859269dc4c3e4b413778ec1154d332896d3c4cca96ac660/debugpy-1.8.18-cp314-cp314-win_amd64.whl", hash = "sha256:714b61d753cfe3ed5e7bf0aad131506d750e271726ac86e3e265fd7eeebbe765", size = 5321982, upload-time = "2025-12-10T19:48:41.086Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/0d/bf7ac329c132436c57124202b5b5ccd6366e5d8e75eeb184cf078c826e8d/debugpy-1.8.18-py2.py3-none-any.whl", hash = "sha256:ab8cf0abe0fe2dfe1f7e65abc04b1db8740f9be80c1274acb625855c5c3ece6e", size = 5286576, upload-time = "2025-12-10T19:48:56.071Z" },
+]
+
+[[package]]
+name = "decorator"
+version = "5.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
+]
+
+[[package]]
+name = "distlib"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
+]
+
+[[package]]
+name = "docutils"
+version = "0.21.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" },
+]
+
+[[package]]
+name = "executing"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
+]
+
+[[package]]
+name = "filelock"
+version = "3.20.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
+]
+
+[[package]]
+name = "fonttools"
+version = "4.61.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/f9/0e84d593c0e12244150280a630999835a64f2852276161b62a0f98318de0/fonttools-4.61.0.tar.gz", hash = "sha256:ec520a1f0c7758d7a858a00f090c1745f6cde6a7c5e76fb70ea4044a15f712e7", size = 3561884, upload-time = "2025-11-28T17:05:49.491Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/be/5aa89cdddf2863d8afbdc19eb8ec5d8d35d40eeeb8e6cf52c5ff1c2dbd33/fonttools-4.61.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a32a16951cbf113d38f1dd8551b277b6e06e0f6f776fece0f99f746d739e1be3", size = 2847553, upload-time = "2025-11-28T17:04:30.539Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/3e/6ff643b07cead1236a534f51291ae2981721cf419135af5b740c002a66dd/fonttools-4.61.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:328a9c227984bebaf69f3ac9062265f8f6acc7ddf2e4e344c63358579af0aa3d", size = 2388298, upload-time = "2025-11-28T17:04:32.161Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/15/fca8dfbe7b482e6f240b1aad0ed7c6e2e75e7a28efa3d3a03b570617b5e5/fonttools-4.61.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2f0bafc8a3b3749c69cc610e5aa3da832d39c2a37a68f03d18ec9a02ecaac04a", size = 5054133, upload-time = "2025-11-28T17:04:34.035Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/a2/821c61c691b21fd09e07528a9a499cc2b075ac83ddb644aa16c9875a64bc/fonttools-4.61.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b5ca59b7417d149cf24e4c1933c9f44b2957424fc03536f132346d5242e0ebe5", size = 5031410, upload-time = "2025-11-28T17:04:36.141Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/f6/8b16339e93d03c732c8a23edefe3061b17a5f9107ddc47a3215ecd054cac/fonttools-4.61.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:df8cbce85cf482eb01f4551edca978c719f099c623277bda8332e5dbe7dba09d", size = 5030005, upload-time = "2025-11-28T17:04:38.314Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/eb/d4e150427bdaa147755239c931bbce829a88149ade5bfd8a327afe565567/fonttools-4.61.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7fb5b84f48a6a733ca3d7f41aa9551908ccabe8669ffe79586560abcc00a9cfd", size = 5154026, upload-time = "2025-11-28T17:04:40.34Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/5f/3dd00ce0dba6759943c707b1830af8c0bcf6f8f1a9fe46cb82e7ac2aaa74/fonttools-4.61.0-cp311-cp311-win32.whl", hash = "sha256:787ef9dfd1ea9fe49573c272412ae5f479d78e671981819538143bec65863865", size = 2276035, upload-time = "2025-11-28T17:04:42.59Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/44/798c472f096ddf12955eddb98f4f7c906e7497695d04ce073ddf7161d134/fonttools-4.61.0-cp311-cp311-win_amd64.whl", hash = "sha256:14fafda386377b6131d9e448af42d0926bad47e038de0e5ba1d58c25d621f028", size = 2327290, upload-time = "2025-11-28T17:04:44.57Z" },
+ { url = "https://files.pythonhosted.org/packages/00/5d/19e5939f773c7cb05480fe2e881d63870b63ee2b4bdb9a77d55b1d36c7b9/fonttools-4.61.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e24a1565c4e57111ec7f4915f8981ecbb61adf66a55f378fdc00e206059fcfef", size = 2846930, upload-time = "2025-11-28T17:04:46.639Z" },
+ { url = "https://files.pythonhosted.org/packages/25/b2/0658faf66f705293bd7e739a4f038302d188d424926be9c59bdad945664b/fonttools-4.61.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2bfacb5351303cae9f072ccf3fc6ecb437a6f359c0606bae4b1ab6715201d87", size = 2383016, upload-time = "2025-11-28T17:04:48.525Z" },
+ { url = "https://files.pythonhosted.org/packages/29/a3/1fa90b95b690f0d7541f48850adc40e9019374d896c1b8148d15012b2458/fonttools-4.61.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0bdcf2e29d65c26299cc3d502f4612365e8b90a939f46cd92d037b6cb7bb544a", size = 4949425, upload-time = "2025-11-28T17:04:50.482Z" },
+ { url = "https://files.pythonhosted.org/packages/af/00/acf18c00f6c501bd6e05ee930f926186f8a8e268265407065688820f1c94/fonttools-4.61.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e6cd0d9051b8ddaf7385f99dd82ec2a058e2b46cf1f1961e68e1ff20fcbb61af", size = 4999632, upload-time = "2025-11-28T17:04:52.508Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/e0/19a2b86e54109b1d2ee8743c96a1d297238ae03243897bc5345c0365f34d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e074bc07c31406f45c418e17c1722e83560f181d122c412fa9e815df0ff74810", size = 4939438, upload-time = "2025-11-28T17:04:54.437Z" },
+ { url = "https://files.pythonhosted.org/packages/04/35/7b57a5f57d46286360355eff8d6b88c64ab6331107f37a273a71c803798d/fonttools-4.61.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a9b78da5d5faa17e63b2404b77feeae105c1b7e75f26020ab7a27b76e02039f", size = 5088960, upload-time = "2025-11-28T17:04:56.348Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/0e/6c5023eb2e0fe5d1ababc7e221e44acd3ff668781489cc1937a6f83d620a/fonttools-4.61.0-cp312-cp312-win32.whl", hash = "sha256:9821ed77bb676736b88fa87a737c97b6af06e8109667e625a4f00158540ce044", size = 2264404, upload-time = "2025-11-28T17:04:58.149Z" },
+ { url = "https://files.pythonhosted.org/packages/36/0b/63273128c7c5df19b1e4cd92e0a1e6ea5bb74a400c4905054c96ad60a675/fonttools-4.61.0-cp312-cp312-win_amd64.whl", hash = "sha256:0011d640afa61053bc6590f9a3394bd222de7cfde19346588beabac374e9d8ac", size = 2314427, upload-time = "2025-11-28T17:04:59.812Z" },
+ { url = "https://files.pythonhosted.org/packages/17/45/334f0d7f181e5473cfb757e1b60f4e60e7fc64f28d406e5d364a952718c0/fonttools-4.61.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba774b8cbd8754f54b8eb58124e8bd45f736b2743325ab1a5229698942b9b433", size = 2841801, upload-time = "2025-11-28T17:05:01.621Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/63/97b9c78e1f79bc741d4efe6e51f13872d8edb2b36e1b9fb2bab0d4491bb7/fonttools-4.61.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c84b430616ed73ce46e9cafd0bf0800e366a3e02fb7e1ad7c1e214dbe3862b1f", size = 2379024, upload-time = "2025-11-28T17:05:03.668Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/80/c87bc524a90dbeb2a390eea23eae448286983da59b7e02c67fa0ca96a8c5/fonttools-4.61.0-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b2b734d8391afe3c682320840c8191de9bd24e7eb85768dd4dc06ed1b63dbb1b", size = 4923706, upload-time = "2025-11-28T17:05:05.494Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/f6/a3b0374811a1de8c3f9207ec88f61ad1bb96f938ed89babae26c065c2e46/fonttools-4.61.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5c5fff72bf31b0e558ed085e4fd7ed96eb85881404ecc39ed2a779e7cf724eb", size = 4979751, upload-time = "2025-11-28T17:05:07.665Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/3b/30f63b4308b449091573285f9d27619563a84f399946bca3eadc9554afbe/fonttools-4.61.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:14a290c5c93fcab76b7f451e6a4b7721b712d90b3b5ed6908f1abcf794e90d6d", size = 4921113, upload-time = "2025-11-28T17:05:09.551Z" },
+ { url = "https://files.pythonhosted.org/packages/41/6c/58e6e9b7d9d8bf2d7010bd7bb493060b39b02a12d1cda64a8bfb116ce760/fonttools-4.61.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:13e3e20a5463bfeb77b3557d04b30bd6a96a6bb5c15c7b2e7908903e69d437a0", size = 5063183, upload-time = "2025-11-28T17:05:11.677Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/e3/52c790ab2b07492df059947a1fd7778e105aac5848c0473029a4d20481a2/fonttools-4.61.0-cp313-cp313-win32.whl", hash = "sha256:6781e7a4bb010be1cd69a29927b0305c86b843395f2613bdabe115f7d6ea7f34", size = 2263159, upload-time = "2025-11-28T17:05:13.292Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/1f/116013b200fbeba871046554d5d2a45fefa69a05c40e9cdfd0d4fff53edc/fonttools-4.61.0-cp313-cp313-win_amd64.whl", hash = "sha256:c53b47834ae41e8e4829171cc44fec0fdf125545a15f6da41776b926b9645a9a", size = 2313530, upload-time = "2025-11-28T17:05:14.848Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/99/59b1e25987787cb714aa9457cee4c9301b7c2153f0b673e2b8679d37669d/fonttools-4.61.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:96dfc9bc1f2302224e48e6ee37e656eddbab810b724b52e9d9c13a57a6abad01", size = 2841429, upload-time = "2025-11-28T17:05:16.671Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/b2/4c1911d4332c8a144bb3b44416e274ccca0e297157c971ea1b3fbb855590/fonttools-4.61.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3b2065d94e5d63aafc2591c8b6ccbdb511001d9619f1bca8ad39b745ebeb5efa", size = 2378987, upload-time = "2025-11-28T17:05:18.69Z" },
+ { url = "https://files.pythonhosted.org/packages/24/b0/f442e90fde5d2af2ae0cb54008ab6411edc557ee33b824e13e1d04925ac9/fonttools-4.61.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0d87e81e4d869549585ba0beb3f033718501c1095004f5e6aef598d13ebc216", size = 4873270, upload-time = "2025-11-28T17:05:20.625Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/04/f5d5990e33053c8a59b90b1d7e10ad9b97a73f42c745304da0e709635fab/fonttools-4.61.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cfa2eb9bae650e58f0e8ad53c49d19a844d6034d6b259f30f197238abc1ccee", size = 4968270, upload-time = "2025-11-28T17:05:22.515Z" },
+ { url = "https://files.pythonhosted.org/packages/94/9f/2091402e0d27c9c8c4bab5de0e5cd146d9609a2d7d1c666bbb75c0011c1a/fonttools-4.61.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4238120002e68296d55e091411c09eab94e111c8ce64716d17df53fd0eb3bb3d", size = 4919799, upload-time = "2025-11-28T17:05:24.437Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/72/86adab22fde710b829f8ffbc8f264df01928e5b7a8f6177fa29979ebf256/fonttools-4.61.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b6ceac262cc62bec01b3bb59abccf41b24ef6580869e306a4e88b7e56bb4bdda", size = 5030966, upload-time = "2025-11-28T17:05:26.115Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/a7/7c8e31b003349e845b853f5e0a67b95ff6b052fa4f5224f8b72624f5ac69/fonttools-4.61.0-cp314-cp314-win32.whl", hash = "sha256:adbb4ecee1a779469a77377bbe490565effe8fce6fb2e6f95f064de58f8bac85", size = 2267243, upload-time = "2025-11-28T17:05:27.807Z" },
+ { url = "https://files.pythonhosted.org/packages/20/ee/f434fe7749360497c52b7dcbcfdbccdaab0a71c59f19d572576066717122/fonttools-4.61.0-cp314-cp314-win_amd64.whl", hash = "sha256:02bdf8e04d1a70476564b8640380f04bb4ac74edc1fc71f1bacb840b3e398ee9", size = 2318822, upload-time = "2025-11-28T17:05:29.882Z" },
+ { url = "https://files.pythonhosted.org/packages/33/b3/c16255320255e5c1863ca2b2599bb61a46e2f566db0bbb9948615a8fe692/fonttools-4.61.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:627216062d90ab0d98215176d8b9562c4dd5b61271d35f130bcd30f6a8aaa33a", size = 2924917, upload-time = "2025-11-28T17:05:31.46Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/b8/08067ae21de705a817777c02ef36ab0b953cbe91d8adf134f9c2da75ed6d/fonttools-4.61.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:7b446623c9cd5f14a59493818eaa80255eec2468c27d2c01b56e05357c263195", size = 2413576, upload-time = "2025-11-28T17:05:33.343Z" },
+ { url = "https://files.pythonhosted.org/packages/42/f1/96ff43f92addce2356780fdc203f2966206f3d22ea20e242c27826fd7442/fonttools-4.61.0-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:70e2a0c0182ee75e493ef33061bfebf140ea57e035481d2f95aa03b66c7a0e05", size = 4877447, upload-time = "2025-11-28T17:05:35.278Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/1e/a3d8e51ed9ccfd7385e239ae374b78d258a0fb82d82cab99160a014a45d1/fonttools-4.61.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9064b0f55b947e929ac669af5311ab1f26f750214db6dd9a0c97e091e918f486", size = 5095681, upload-time = "2025-11-28T17:05:37.142Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/f6/d256bd6c1065c146a0bdddf1c62f542e08ae5b3405dbf3fcc52be272f674/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2cb5e45a824ce14b90510024d0d39dae51bd4fbb54c42a9334ea8c8cf4d95cbe", size = 4974140, upload-time = "2025-11-28T17:05:39.5Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/0c/96633eb4b26f138cc48561c6e0c44b4ea48acea56b20b507d6b14f8e80ce/fonttools-4.61.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6e5ca8c62efdec7972dfdfd454415c4db49b89aeaefaaacada432f3b7eea9866", size = 5001741, upload-time = "2025-11-28T17:05:41.424Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/9a/3b536bad3be4f26186f296e749ff17bad3e6d57232c104d752d24b2e265b/fonttools-4.61.0-cp314-cp314t-win32.whl", hash = "sha256:63c7125d31abe3e61d7bb917329b5543c5b3448db95f24081a13aaf064360fc8", size = 2330707, upload-time = "2025-11-28T17:05:43.548Z" },
+ { url = "https://files.pythonhosted.org/packages/18/ea/e6b9ac610451ee9f04477c311ad126de971f6112cb579fa391d2a8edb00b/fonttools-4.61.0-cp314-cp314t-win_amd64.whl", hash = "sha256:67d841aa272be5500de7f447c40d1d8452783af33b4c3599899319f6ef9ad3c1", size = 2395950, upload-time = "2025-11-28T17:05:45.638Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/14/634f7daea5ffe6a5f7a0322ba8e1a0e23c9257b80aa91458107896d1dfc7/fonttools-4.61.0-py3-none-any.whl", hash = "sha256:276f14c560e6f98d24ef7f5f44438e55ff5a67f78fa85236b218462c9f5d0635", size = 1144485, upload-time = "2025-11-28T17:05:47.573Z" },
+]
+
+[[package]]
+name = "geopandas"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+ { name = "packaging" },
+ { name = "pandas" },
+ { name = "pyogrio" },
+ { name = "pyproj" },
+ { name = "shapely" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8c/76/e1960ba846f153ab109575242abf89dc98f8e057faa32f3decf4cce9247a/geopandas-1.1.1.tar.gz", hash = "sha256:1745713f64d095c43e72e08e753dbd271678254b24f2e01db8cdb8debe1d293d", size = 332655, upload-time = "2025-06-26T21:04:56.57Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0b/70/d5cd0696eff08e62fdbdebe5b46527facb4e7220eabe0ac6225efab50168/geopandas-1.1.1-py3-none-any.whl", hash = "sha256:589e61aaf39b19828843df16cb90234e72897e2579be236f10eee0d052ad98e8", size = 338365, upload-time = "2025-06-26T21:04:55.139Z" },
+]
+
+[[package]]
+name = "gerrychain"
+version = "1.0.0a2"
+source = { editable = "." }
+dependencies = [
+ { name = "geopandas" },
+ { name = "networkx" },
+ { name = "numpy" },
+ { name = "pandas" },
+ { name = "rustworkx" },
+ { name = "scipy" },
+ { name = "shapely" },
+ { name = "tqdm" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "autopep8" },
+ { name = "black" },
+ { name = "ipykernel" },
+ { name = "ipywidgets" },
+ { name = "isort" },
+ { name = "myst-parser" },
+ { name = "pre-commit" },
+ { name = "pytest" },
+ { name = "pytest-cov" },
+]
+docs = [
+ { name = "myst-parser" },
+ { name = "recommonmark" },
+ { name = "sphinx" },
+ { name = "sphinx-copybutton" },
+ { name = "sphinx-rtd-theme" },
+]
+frm = [
+ { name = "matplotlib" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "geopandas", specifier = ">=1.1.1" },
+ { name = "networkx", specifier = ">=3.6.1" },
+ { name = "numpy", specifier = ">=2.3.5" },
+ { name = "pandas", specifier = ">=2.3.3" },
+ { name = "rustworkx", specifier = ">=0.17.1" },
+ { name = "scipy", specifier = ">=1.16.3" },
+ { name = "shapely", specifier = ">=2.1.2" },
+ { name = "tqdm", specifier = ">=4.67.1" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "autopep8", specifier = ">=2.3.2" },
+ { name = "black", specifier = ">=25.1.0" },
+ { name = "ipykernel", specifier = ">=6.30.1" },
+ { name = "ipywidgets", specifier = ">=8.1.7" },
+ { name = "isort", specifier = ">=6.0.1" },
+ { name = "myst-parser", specifier = ">=4.0.1" },
+ { name = "pre-commit", specifier = ">=4.3.0" },
+ { name = "pytest", specifier = ">=8.4.2" },
+ { name = "pytest-cov", specifier = ">=6.3.0" },
+]
+docs = [
+ { name = "myst-parser", specifier = ">=4.0.1" },
+ { name = "recommonmark", specifier = ">=0.7.1" },
+ { name = "sphinx", specifier = ">=8.2.3" },
+ { name = "sphinx-copybutton", specifier = ">=0.5.2" },
+ { name = "sphinx-rtd-theme", specifier = ">=3.0.2" },
+]
+frm = [{ name = "matplotlib", specifier = ">=3.10.7" }]
+
+[[package]]
+name = "identify"
+version = "2.6.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
+]
+
+[[package]]
+name = "idna"
+version = "3.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" },
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
+[[package]]
+name = "ipykernel"
+version = "7.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "appnope", marker = "sys_platform == 'darwin'" },
+ { name = "comm" },
+ { name = "debugpy" },
+ { name = "ipython" },
+ { name = "jupyter-client" },
+ { name = "jupyter-core" },
+ { name = "matplotlib-inline" },
+ { name = "nest-asyncio" },
+ { name = "packaging" },
+ { name = "psutil" },
+ { name = "pyzmq" },
+ { name = "tornado" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" },
+]
+
+[[package]]
+name = "ipython"
+version = "9.8.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "decorator" },
+ { name = "ipython-pygments-lexers" },
+ { name = "jedi" },
+ { name = "matplotlib-inline" },
+ { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
+ { name = "prompt-toolkit" },
+ { name = "pygments" },
+ { name = "stack-data" },
+ { name = "traitlets" },
+ { name = "typing-extensions", marker = "python_full_version < '3.12'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/12/51/a703c030f4928646d390b4971af4938a1b10c9dfce694f0d99a0bb073cb2/ipython-9.8.0.tar.gz", hash = "sha256:8e4ce129a627eb9dd221c41b1d2cdaed4ef7c9da8c17c63f6f578fe231141f83", size = 4424940, upload-time = "2025-12-03T10:18:24.353Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/df/8ee1c5dd1e3308b5d5b2f2dfea323bb2f3827da8d654abb6642051199049/ipython-9.8.0-py3-none-any.whl", hash = "sha256:ebe6d1d58d7d988fbf23ff8ff6d8e1622cfdb194daf4b7b73b792c4ec3b85385", size = 621374, upload-time = "2025-12-03T10:18:22.335Z" },
+]
+
+[[package]]
+name = "ipython-pygments-lexers"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
+]
+
+[[package]]
+name = "ipywidgets"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "comm" },
+ { name = "ipython" },
+ { name = "jupyterlab-widgets" },
+ { name = "traitlets" },
+ { name = "widgetsnbextension" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4c/ae/c5ce1edc1afe042eadb445e95b0671b03cee61895264357956e61c0d2ac0/ipywidgets-8.1.8.tar.gz", hash = "sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668", size = 116739, upload-time = "2025-11-01T21:18:12.393Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/56/6d/0d9848617b9f753b87f214f1c682592f7ca42de085f564352f10f0843026/ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e", size = 139808, upload-time = "2025-11-01T21:18:10.956Z" },
+]
+
+[[package]]
+name = "isort"
+version = "7.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" },
+]
+
+[[package]]
+name = "jedi"
+version = "0.19.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "parso" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.6"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
+]
+
+[[package]]
+name = "jupyter-client"
+version = "8.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-core" },
+ { name = "python-dateutil" },
+ { name = "pyzmq" },
+ { name = "tornado" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a6/27/d10de45e8ad4ce872372c4a3a37b7b35b6b064f6f023a5c14ffcced4d59d/jupyter_client-8.7.0.tar.gz", hash = "sha256:3357212d9cbe01209e59190f67a3a7e1f387a4f4e88d1e0433ad84d7b262531d", size = 344691, upload-time = "2025-12-09T18:37:01.953Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bb/f5/fddaec430367be9d62a7ed125530e133bfd4a1c0350fe221149ee0f2b526/jupyter_client-8.7.0-py3-none-any.whl", hash = "sha256:3671a94fd25e62f5f2f554f5e95389c2294d89822378a5f2dd24353e1494a9e0", size = 106215, upload-time = "2025-12-09T18:37:00.024Z" },
+]
+
+[[package]]
+name = "jupyter-core"
+version = "5.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "platformdirs" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/02/49/9d1284d0dc65e2c757b74c6687b6d319b02f822ad039e5c512df9194d9dd/jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508", size = 89814, upload-time = "2025-10-16T19:19:18.444Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e7/e7/80988e32bf6f73919a113473a604f5a8f09094de312b9d52b79c2df7612b/jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407", size = 29032, upload-time = "2025-10-16T19:19:16.783Z" },
+]
+
+[[package]]
+name = "jupyterlab-widgets"
+version = "3.0.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/2d/ef58fed122b268c69c0aa099da20bc67657cdfb2e222688d5731bd5b971d/jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0", size = 897423, upload-time = "2025-11-01T21:11:29.724Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ab/b5/36c712098e6191d1b4e349304ef73a8d06aed77e56ceaac8c0a306c7bda1/jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8", size = 914926, upload-time = "2025-11-01T21:11:28.008Z" },
+]
+
+[[package]]
+name = "kiwisolver"
+version = "1.4.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" },
+ { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" },
+ { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" },
+ { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" },
+ { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" },
+ { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" },
+ { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" },
+ { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" },
+ { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" },
+ { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" },
+ { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" },
+ { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" },
+ { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" },
+ { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" },
+ { url = "https://files.pythonhosted.org/packages/31/c1/c2686cda909742ab66c7388e9a1a8521a59eb89f8bcfbee28fc980d07e24/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8", size = 123681, upload-time = "2025-08-10T21:26:26.725Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/f0/f44f50c9f5b1a1860261092e3bc91ecdc9acda848a8b8c6abfda4a24dd5c/kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2", size = 66464, upload-time = "2025-08-10T21:26:27.733Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/7a/9d90a151f558e29c3936b8a47ac770235f436f2120aca41a6d5f3d62ae8d/kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f", size = 64961, upload-time = "2025-08-10T21:26:28.729Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/e9/f218a2cb3a9ffbe324ca29a9e399fa2d2866d7f348ec3a88df87fc248fc5/kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098", size = 1474607, upload-time = "2025-08-10T21:26:29.798Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/28/aac26d4c882f14de59041636292bc838db8961373825df23b8eeb807e198/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed", size = 1276546, upload-time = "2025-08-10T21:26:31.401Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/ad/8bfc1c93d4cc565e5069162f610ba2f48ff39b7de4b5b8d93f69f30c4bed/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525", size = 1294482, upload-time = "2025-08-10T21:26:32.721Z" },
+ { url = "https://files.pythonhosted.org/packages/da/f1/6aca55ff798901d8ce403206d00e033191f63d82dd708a186e0ed2067e9c/kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78", size = 1343720, upload-time = "2025-08-10T21:26:34.032Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/91/eed031876c595c81d90d0f6fc681ece250e14bf6998c3d7c419466b523b7/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b", size = 2224907, upload-time = "2025-08-10T21:26:35.824Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/ec/4d1925f2e49617b9cca9c34bfa11adefad49d00db038e692a559454dfb2e/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799", size = 2321334, upload-time = "2025-08-10T21:26:37.534Z" },
+ { url = "https://files.pythonhosted.org/packages/43/cb/450cd4499356f68802750c6ddc18647b8ea01ffa28f50d20598e0befe6e9/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3", size = 2488313, upload-time = "2025-08-10T21:26:39.191Z" },
+ { url = "https://files.pythonhosted.org/packages/71/67/fc76242bd99f885651128a5d4fa6083e5524694b7c88b489b1b55fdc491d/kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c", size = 2291970, upload-time = "2025-08-10T21:26:40.828Z" },
+ { url = "https://files.pythonhosted.org/packages/75/bd/f1a5d894000941739f2ae1b65a32892349423ad49c2e6d0771d0bad3fae4/kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d", size = 73894, upload-time = "2025-08-10T21:26:42.33Z" },
+ { url = "https://files.pythonhosted.org/packages/95/38/dce480814d25b99a391abbddadc78f7c117c6da34be68ca8b02d5848b424/kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2", size = 64995, upload-time = "2025-08-10T21:26:43.889Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/37/7d218ce5d92dadc5ebdd9070d903e0c7cf7edfe03f179433ac4d13ce659c/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1", size = 126510, upload-time = "2025-08-10T21:26:44.915Z" },
+ { url = "https://files.pythonhosted.org/packages/23/b0/e85a2b48233daef4b648fb657ebbb6f8367696a2d9548a00b4ee0eb67803/kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1", size = 67903, upload-time = "2025-08-10T21:26:45.934Z" },
+ { url = "https://files.pythonhosted.org/packages/44/98/f2425bc0113ad7de24da6bb4dae1343476e95e1d738be7c04d31a5d037fd/kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11", size = 66402, upload-time = "2025-08-10T21:26:47.101Z" },
+ { url = "https://files.pythonhosted.org/packages/98/d8/594657886df9f34c4177cc353cc28ca7e6e5eb562d37ccc233bff43bbe2a/kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c", size = 1582135, upload-time = "2025-08-10T21:26:48.665Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/c6/38a115b7170f8b306fc929e166340c24958347308ea3012c2b44e7e295db/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197", size = 1389409, upload-time = "2025-08-10T21:26:50.335Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/3b/e04883dace81f24a568bcee6eb3001da4ba05114afa622ec9b6fafdc1f5e/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c", size = 1401763, upload-time = "2025-08-10T21:26:51.867Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/80/20ace48e33408947af49d7d15c341eaee69e4e0304aab4b7660e234d6288/kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185", size = 1453643, upload-time = "2025-08-10T21:26:53.592Z" },
+ { url = "https://files.pythonhosted.org/packages/64/31/6ce4380a4cd1f515bdda976a1e90e547ccd47b67a1546d63884463c92ca9/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748", size = 2330818, upload-time = "2025-08-10T21:26:55.051Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/e9/3f3fcba3bcc7432c795b82646306e822f3fd74df0ee81f0fa067a1f95668/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64", size = 2419963, upload-time = "2025-08-10T21:26:56.421Z" },
+ { url = "https://files.pythonhosted.org/packages/99/43/7320c50e4133575c66e9f7dadead35ab22d7c012a3b09bb35647792b2a6d/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff", size = 2594639, upload-time = "2025-08-10T21:26:57.882Z" },
+ { url = "https://files.pythonhosted.org/packages/65/d6/17ae4a270d4a987ef8a385b906d2bdfc9fce502d6dc0d3aea865b47f548c/kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07", size = 2391741, upload-time = "2025-08-10T21:26:59.237Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/8f/8f6f491d595a9e5912971f3f863d81baddccc8a4d0c3749d6a0dd9ffc9df/kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c", size = 68646, upload-time = "2025-08-10T21:27:00.52Z" },
+ { url = "https://files.pythonhosted.org/packages/6b/32/6cc0fbc9c54d06c2969faa9c1d29f5751a2e51809dd55c69055e62d9b426/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386", size = 123806, upload-time = "2025-08-10T21:27:01.537Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/dd/2bfb1d4a4823d92e8cbb420fe024b8d2167f72079b3bb941207c42570bdf/kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552", size = 66605, upload-time = "2025-08-10T21:27:03.335Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/69/00aafdb4e4509c2ca6064646cba9cd4b37933898f426756adb2cb92ebbed/kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3", size = 64925, upload-time = "2025-08-10T21:27:04.339Z" },
+ { url = "https://files.pythonhosted.org/packages/43/dc/51acc6791aa14e5cb6d8a2e28cefb0dc2886d8862795449d021334c0df20/kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58", size = 1472414, upload-time = "2025-08-10T21:27:05.437Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/bb/93fa64a81db304ac8a246f834d5094fae4b13baf53c839d6bb6e81177129/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4", size = 1281272, upload-time = "2025-08-10T21:27:07.063Z" },
+ { url = "https://files.pythonhosted.org/packages/70/e6/6df102916960fb8d05069d4bd92d6d9a8202d5a3e2444494e7cd50f65b7a/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df", size = 1298578, upload-time = "2025-08-10T21:27:08.452Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/47/e142aaa612f5343736b087864dbaebc53ea8831453fb47e7521fa8658f30/kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6", size = 1345607, upload-time = "2025-08-10T21:27:10.125Z" },
+ { url = "https://files.pythonhosted.org/packages/54/89/d641a746194a0f4d1a3670fb900d0dbaa786fb98341056814bc3f058fa52/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5", size = 2230150, upload-time = "2025-08-10T21:27:11.484Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/6b/5ee1207198febdf16ac11f78c5ae40861b809cbe0e6d2a8d5b0b3044b199/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf", size = 2325979, upload-time = "2025-08-10T21:27:12.917Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/ff/b269eefd90f4ae14dcc74973d5a0f6d28d3b9bb1afd8c0340513afe6b39a/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5", size = 2491456, upload-time = "2025-08-10T21:27:14.353Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/d4/10303190bd4d30de547534601e259a4fbf014eed94aae3e5521129215086/kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce", size = 2294621, upload-time = "2025-08-10T21:27:15.808Z" },
+ { url = "https://files.pythonhosted.org/packages/28/e0/a9a90416fce5c0be25742729c2ea52105d62eda6c4be4d803c2a7be1fa50/kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7", size = 75417, upload-time = "2025-08-10T21:27:17.436Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/10/6949958215b7a9a264299a7db195564e87900f709db9245e4ebdd3c70779/kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c", size = 66582, upload-time = "2025-08-10T21:27:18.436Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/79/60e53067903d3bc5469b369fe0dfc6b3482e2133e85dae9daa9527535991/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548", size = 126514, upload-time = "2025-08-10T21:27:19.465Z" },
+ { url = "https://files.pythonhosted.org/packages/25/d1/4843d3e8d46b072c12a38c97c57fab4608d36e13fe47d47ee96b4d61ba6f/kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d", size = 67905, upload-time = "2025-08-10T21:27:20.51Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/ae/29ffcbd239aea8b93108de1278271ae764dfc0d803a5693914975f200596/kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c", size = 66399, upload-time = "2025-08-10T21:27:21.496Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/ae/d7ba902aa604152c2ceba5d352d7b62106bedbccc8e95c3934d94472bfa3/kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122", size = 1582197, upload-time = "2025-08-10T21:27:22.604Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/41/27c70d427eddb8bc7e4f16420a20fefc6f480312122a59a959fdfe0445ad/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64", size = 1390125, upload-time = "2025-08-10T21:27:24.036Z" },
+ { url = "https://files.pythonhosted.org/packages/41/42/b3799a12bafc76d962ad69083f8b43b12bf4fe78b097b12e105d75c9b8f1/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134", size = 1402612, upload-time = "2025-08-10T21:27:25.773Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/b5/a210ea073ea1cfaca1bb5c55a62307d8252f531beb364e18aa1e0888b5a0/kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370", size = 1453990, upload-time = "2025-08-10T21:27:27.089Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/ce/a829eb8c033e977d7ea03ed32fb3c1781b4fa0433fbadfff29e39c676f32/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21", size = 2331601, upload-time = "2025-08-10T21:27:29.343Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/4b/b5e97eb142eb9cd0072dacfcdcd31b1c66dc7352b0f7c7255d339c0edf00/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a", size = 2422041, upload-time = "2025-08-10T21:27:30.754Z" },
+ { url = "https://files.pythonhosted.org/packages/40/be/8eb4cd53e1b85ba4edc3a9321666f12b83113a178845593307a3e7891f44/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f", size = 2594897, upload-time = "2025-08-10T21:27:32.803Z" },
+ { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" },
+ { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" },
+ { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" },
+ { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" },
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mdurl" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
+ { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
+ { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
+ { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
+ { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
+ { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
+ { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
+ { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
+ { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+ { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+ { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+ { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+ { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+ { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+ { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+ { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+ { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+ { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" },
+ { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" },
+ { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" },
+ { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" },
+ { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" },
+ { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" },
+ { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" },
+ { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" },
+ { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" },
+ { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" },
+ { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
+]
+
+[[package]]
+name = "matplotlib"
+version = "3.10.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "contourpy" },
+ { name = "cycler" },
+ { name = "fonttools" },
+ { name = "kiwisolver" },
+ { name = "numpy" },
+ { name = "packaging" },
+ { name = "pillow" },
+ { name = "pyparsing" },
+ { name = "python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ae/e2/d2d5295be2f44c678ebaf3544ba32d20c1f9ef08c49fe47f496180e1db15/matplotlib-3.10.7.tar.gz", hash = "sha256:a06ba7e2a2ef9131c79c49e63dad355d2d878413a0376c1727c8b9335ff731c7", size = 34804865, upload-time = "2025-10-09T00:28:00.669Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fc/bc/0fb489005669127ec13f51be0c6adc074d7cf191075dab1da9fe3b7a3cfc/matplotlib-3.10.7-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:53b492410a6cd66c7a471de6c924f6ede976e963c0f3097a3b7abfadddc67d0a", size = 8257507, upload-time = "2025-10-09T00:26:19.073Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/6a/d42588ad895279ff6708924645b5d2ed54a7fb2dc045c8a804e955aeace1/matplotlib-3.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d9749313deb729f08207718d29c86246beb2ea3fdba753595b55901dee5d2fd6", size = 8119565, upload-time = "2025-10-09T00:26:21.023Z" },
+ { url = "https://files.pythonhosted.org/packages/10/b7/4aa196155b4d846bd749cf82aa5a4c300cf55a8b5e0dfa5b722a63c0f8a0/matplotlib-3.10.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2222c7ba2cbde7fe63032769f6eb7e83ab3227f47d997a8453377709b7fe3a5a", size = 8692668, upload-time = "2025-10-09T00:26:22.967Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/e7/664d2b97016f46683a02d854d730cfcf54ff92c1dafa424beebef50f831d/matplotlib-3.10.7-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e91f61a064c92c307c5a9dc8c05dc9f8a68f0a3be199d9a002a0622e13f874a1", size = 9521051, upload-time = "2025-10-09T00:26:25.041Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/a3/37aef1404efa615f49b5758a5e0261c16dd88f389bc1861e722620e4a754/matplotlib-3.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f1851eab59ca082c95df5a500106bad73672645625e04538b3ad0f69471ffcc", size = 9576878, upload-time = "2025-10-09T00:26:27.478Z" },
+ { url = "https://files.pythonhosted.org/packages/33/cd/b145f9797126f3f809d177ca378de57c45413c5099c5990de2658760594a/matplotlib-3.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:6516ce375109c60ceec579e699524e9d504cd7578506f01150f7a6bc174a775e", size = 8115142, upload-time = "2025-10-09T00:26:29.774Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/39/63bca9d2b78455ed497fcf51a9c71df200a11048f48249038f06447fa947/matplotlib-3.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:b172db79759f5f9bc13ef1c3ef8b9ee7b37b0247f987fbbbdaa15e4f87fd46a9", size = 7992439, upload-time = "2025-10-09T00:26:40.32Z" },
+ { url = "https://files.pythonhosted.org/packages/be/b3/09eb0f7796932826ec20c25b517d568627754f6c6462fca19e12c02f2e12/matplotlib-3.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a0edb7209e21840e8361e91ea84ea676658aa93edd5f8762793dec77a4a6748", size = 8272389, upload-time = "2025-10-09T00:26:42.474Z" },
+ { url = "https://files.pythonhosted.org/packages/11/0b/1ae80ddafb8652fd8046cb5c8460ecc8d4afccb89e2c6d6bec61e04e1eaf/matplotlib-3.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c380371d3c23e0eadf8ebff114445b9f970aff2010198d498d4ab4c3b41eea4f", size = 8128247, upload-time = "2025-10-09T00:26:44.77Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/18/95ae2e242d4a5c98bd6e90e36e128d71cf1c7e39b0874feaed3ef782e789/matplotlib-3.10.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d5f256d49fea31f40f166a5e3131235a5d2f4b7f44520b1cf0baf1ce568ccff0", size = 8696996, upload-time = "2025-10-09T00:26:46.792Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/3d/5b559efc800bd05cb2033aa85f7e13af51958136a48327f7c261801ff90a/matplotlib-3.10.7-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11ae579ac83cdf3fb72573bb89f70e0534de05266728740d478f0f818983c695", size = 9530153, upload-time = "2025-10-09T00:26:49.07Z" },
+ { url = "https://files.pythonhosted.org/packages/88/57/eab4a719fd110312d3c220595d63a3c85ec2a39723f0f4e7fa7e6e3f74ba/matplotlib-3.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4c14b6acd16cddc3569a2d515cfdd81c7a68ac5639b76548cfc1a9e48b20eb65", size = 9593093, upload-time = "2025-10-09T00:26:51.067Z" },
+ { url = "https://files.pythonhosted.org/packages/31/3c/80816f027b3a4a28cd2a0a6ef7f89a2db22310e945cd886ec25bfb399221/matplotlib-3.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:0d8c32b7ea6fb80b1aeff5a2ceb3fb9778e2759e899d9beff75584714afcc5ee", size = 8122771, upload-time = "2025-10-09T00:26:53.296Z" },
+ { url = "https://files.pythonhosted.org/packages/de/77/ef1fc78bfe99999b2675435cc52120887191c566b25017d78beaabef7f2d/matplotlib-3.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:5f3f6d315dcc176ba7ca6e74c7768fb7e4cf566c49cb143f6bc257b62e634ed8", size = 7992812, upload-time = "2025-10-09T00:26:54.882Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9c/207547916a02c78f6bdd83448d9b21afbc42f6379ed887ecf610984f3b4e/matplotlib-3.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1d9d3713a237970569156cfb4de7533b7c4eacdd61789726f444f96a0d28f57f", size = 8273212, upload-time = "2025-10-09T00:26:56.752Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/d0/b3d3338d467d3fc937f0bb7f256711395cae6f78e22cef0656159950adf0/matplotlib-3.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37a1fea41153dd6ee061d21ab69c9cf2cf543160b1b85d89cd3d2e2a7902ca4c", size = 8128713, upload-time = "2025-10-09T00:26:59.001Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ff/6425bf5c20d79aa5b959d1ce9e65f599632345391381c9a104133fe0b171/matplotlib-3.10.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b3c4ea4948d93c9c29dc01c0c23eef66f2101bf75158c291b88de6525c55c3d1", size = 8698527, upload-time = "2025-10-09T00:27:00.69Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/7f/ccdca06f4c2e6c7989270ed7829b8679466682f4cfc0f8c9986241c023b6/matplotlib-3.10.7-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22df30ffaa89f6643206cf13877191c63a50e8f800b038bc39bee9d2d4957632", size = 9529690, upload-time = "2025-10-09T00:27:02.664Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/95/b80fc2c1f269f21ff3d193ca697358e24408c33ce2b106a7438a45407b63/matplotlib-3.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b69676845a0a66f9da30e87f48be36734d6748024b525ec4710be40194282c84", size = 9593732, upload-time = "2025-10-09T00:27:04.653Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/b6/23064a96308b9aeceeffa65e96bcde459a2ea4934d311dee20afde7407a0/matplotlib-3.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:744991e0cc863dd669c8dc9136ca4e6e0082be2070b9d793cbd64bec872a6815", size = 8122727, upload-time = "2025-10-09T00:27:06.814Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/a6/2faaf48133b82cf3607759027f82b5c702aa99cdfcefb7f93d6ccf26a424/matplotlib-3.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:fba2974df0bf8ce3c995fa84b79cde38326e0f7b5409e7a3a481c1141340bcf7", size = 7992958, upload-time = "2025-10-09T00:27:08.567Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/f0/b018fed0b599bd48d84c08794cb242227fe3341952da102ee9d9682db574/matplotlib-3.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:932c55d1fa7af4423422cb6a492a31cbcbdbe68fd1a9a3f545aa5e7a143b5355", size = 8316849, upload-time = "2025-10-09T00:27:10.254Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/b7/bb4f23856197659f275e11a2a164e36e65e9b48ea3e93c4ec25b4f163198/matplotlib-3.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e38c2d581d62ee729a6e144c47a71b3f42fb4187508dbbf4fe71d5612c3433b", size = 8178225, upload-time = "2025-10-09T00:27:12.241Z" },
+ { url = "https://files.pythonhosted.org/packages/62/56/0600609893ff277e6f3ab3c0cef4eafa6e61006c058e84286c467223d4d5/matplotlib-3.10.7-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:786656bb13c237bbcebcd402f65f44dd61ead60ee3deb045af429d889c8dbc67", size = 8711708, upload-time = "2025-10-09T00:27:13.879Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/1a/6bfecb0cafe94d6658f2f1af22c43b76cf7a1c2f0dc34ef84cbb6809617e/matplotlib-3.10.7-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09d7945a70ea43bf9248f4b6582734c2fe726723204a76eca233f24cffc7ef67", size = 9541409, upload-time = "2025-10-09T00:27:15.684Z" },
+ { url = "https://files.pythonhosted.org/packages/08/50/95122a407d7f2e446fd865e2388a232a23f2b81934960ea802f3171518e4/matplotlib-3.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d0b181e9fa8daf1d9f2d4c547527b167cb8838fc587deabca7b5c01f97199e84", size = 9594054, upload-time = "2025-10-09T00:27:17.547Z" },
+ { url = "https://files.pythonhosted.org/packages/13/76/75b194a43b81583478a81e78a07da8d9ca6ddf50dd0a2ccabf258059481d/matplotlib-3.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:31963603041634ce1a96053047b40961f7a29eb8f9a62e80cc2c0427aa1d22a2", size = 8200100, upload-time = "2025-10-09T00:27:20.039Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/9e/6aefebdc9f8235c12bdeeda44cc0383d89c1e41da2c400caf3ee2073a3ce/matplotlib-3.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:aebed7b50aa6ac698c90f60f854b47e48cd2252b30510e7a1feddaf5a3f72cbf", size = 8042131, upload-time = "2025-10-09T00:27:21.608Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/4b/e5bc2c321b6a7e3a75638d937d19ea267c34bd5a90e12bee76c4d7c7a0d9/matplotlib-3.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d883460c43e8c6b173fef244a2341f7f7c0e9725c7fe68306e8e44ed9c8fb100", size = 8273787, upload-time = "2025-10-09T00:27:23.27Z" },
+ { url = "https://files.pythonhosted.org/packages/86/ad/6efae459c56c2fbc404da154e13e3a6039129f3c942b0152624f1c621f05/matplotlib-3.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07124afcf7a6504eafcb8ce94091c5898bbdd351519a1beb5c45f7a38c67e77f", size = 8131348, upload-time = "2025-10-09T00:27:24.926Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/5a/a4284d2958dee4116359cc05d7e19c057e64ece1b4ac986ab0f2f4d52d5a/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c17398b709a6cce3d9fdb1595c33e356d91c098cd9486cb2cc21ea2ea418e715", size = 9533949, upload-time = "2025-10-09T00:27:26.704Z" },
+ { url = "https://files.pythonhosted.org/packages/de/ff/f3781b5057fa3786623ad8976fc9f7b0d02b2f28534751fd5a44240de4cf/matplotlib-3.10.7-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7146d64f561498764561e9cd0ed64fcf582e570fc519e6f521e2d0cfd43365e1", size = 9804247, upload-time = "2025-10-09T00:27:28.514Z" },
+ { url = "https://files.pythonhosted.org/packages/47/5a/993a59facb8444efb0e197bf55f545ee449902dcee86a4dfc580c3b61314/matplotlib-3.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:90ad854c0a435da3104c01e2c6f0028d7e719b690998a2333d7218db80950722", size = 9595497, upload-time = "2025-10-09T00:27:30.418Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/a5/77c95aaa9bb32c345cbb49626ad8eb15550cba2e6d4c88081a6c2ac7b08d/matplotlib-3.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:4645fc5d9d20ffa3a39361fcdbcec731382763b623b72627806bf251b6388866", size = 8252732, upload-time = "2025-10-09T00:27:32.332Z" },
+ { url = "https://files.pythonhosted.org/packages/74/04/45d269b4268d222390d7817dae77b159651909669a34ee9fdee336db5883/matplotlib-3.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:9257be2f2a03415f9105c486d304a321168e61ad450f6153d77c69504ad764bb", size = 8124240, upload-time = "2025-10-09T00:27:33.94Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/c7/ca01c607bb827158b439208c153d6f14ddb9fb640768f06f7ca3488ae67b/matplotlib-3.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1e4bbad66c177a8fdfa53972e5ef8be72a5f27e6a607cec0d8579abd0f3102b1", size = 8316938, upload-time = "2025-10-09T00:27:35.534Z" },
+ { url = "https://files.pythonhosted.org/packages/84/d2/5539e66e9f56d2fdec94bb8436f5e449683b4e199bcc897c44fbe3c99e28/matplotlib-3.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d8eb7194b084b12feb19142262165832fc6ee879b945491d1c3d4660748020c4", size = 8178245, upload-time = "2025-10-09T00:27:37.334Z" },
+ { url = "https://files.pythonhosted.org/packages/77/b5/e6ca22901fd3e4fe433a82e583436dd872f6c966fca7e63cf806b40356f8/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d41379b05528091f00e1728004f9a8d7191260f3862178b88e8fd770206318", size = 9541411, upload-time = "2025-10-09T00:27:39.387Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/99/a4524db57cad8fee54b7237239a8f8360bfcfa3170d37c9e71c090c0f409/matplotlib-3.10.7-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4a74f79fafb2e177f240579bc83f0b60f82cc47d2f1d260f422a0627207008ca", size = 9803664, upload-time = "2025-10-09T00:27:41.492Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/a5/85e2edf76ea0ad4288d174926d9454ea85f3ce5390cc4e6fab196cbf250b/matplotlib-3.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:702590829c30aada1e8cef0568ddbffa77ca747b4d6e36c6d173f66e301f89cc", size = 9594066, upload-time = "2025-10-09T00:27:43.694Z" },
+ { url = "https://files.pythonhosted.org/packages/39/69/9684368a314f6d83fe5c5ad2a4121a3a8e03723d2e5c8ea17b66c1bad0e7/matplotlib-3.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:f79d5de970fc90cd5591f60053aecfce1fcd736e0303d9f0bf86be649fa68fb8", size = 8342832, upload-time = "2025-10-09T00:27:45.543Z" },
+ { url = "https://files.pythonhosted.org/packages/04/5f/e22e08da14bc1a0894184640d47819d2338b792732e20d292bf86e5ab785/matplotlib-3.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:cb783436e47fcf82064baca52ce748af71725d0352e1d31564cbe9c95df92b9c", size = 8172585, upload-time = "2025-10-09T00:27:47.185Z" },
+ { url = "https://files.pythonhosted.org/packages/58/8f/76d5dc21ac64a49e5498d7f0472c0781dae442dd266a67458baec38288ec/matplotlib-3.10.7-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:15112bcbaef211bd663fa935ec33313b948e214454d949b723998a43357b17b0", size = 8252283, upload-time = "2025-10-09T00:27:54.739Z" },
+ { url = "https://files.pythonhosted.org/packages/27/0d/9c5d4c2317feb31d819e38c9f947c942f42ebd4eb935fc6fd3518a11eaa7/matplotlib-3.10.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d2a959c640cdeecdd2ec3136e8ea0441da59bcaf58d67e9c590740addba2cb68", size = 8116733, upload-time = "2025-10-09T00:27:56.406Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/cc/3fe688ff1355010937713164caacf9ed443675ac48a997bab6ed23b3f7c0/matplotlib-3.10.7-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3886e47f64611046bc1db523a09dd0a0a6bed6081e6f90e13806dd1d1d1b5e91", size = 8693919, upload-time = "2025-10-09T00:27:58.41Z" },
+]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" },
+]
+
+[[package]]
+name = "mdit-py-plugins"
+version = "0.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "markdown-it-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" },
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
+]
+
+[[package]]
+name = "myst-parser"
+version = "4.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "docutils" },
+ { name = "jinja2" },
+ { name = "markdown-it-py" },
+ { name = "mdit-py-plugins" },
+ { name = "pyyaml" },
+ { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" },
+]
+
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
+]
+
+[[package]]
+name = "networkx"
+version = "3.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" },
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
+]
+
+[[package]]
+name = "numpy"
+version = "2.3.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/43/77/84dd1d2e34d7e2792a236ba180b5e8fcc1e3e414e761ce0253f63d7f572e/numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10", size = 17034641, upload-time = "2025-11-16T22:49:19.336Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/ea/25e26fa5837106cde46ae7d0b667e20f69cbbc0efd64cba8221411ab26ae/numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218", size = 12528324, upload-time = "2025-11-16T22:49:22.582Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/1a/e85f0eea4cf03d6a0228f5c0256b53f2df4bc794706e7df019fc622e47f1/numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d", size = 5356872, upload-time = "2025-11-16T22:49:25.408Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/bb/35ef04afd567f4c989c2060cde39211e4ac5357155c1833bcd1166055c61/numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5", size = 6893148, upload-time = "2025-11-16T22:49:27.549Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/2b/05bbeb06e2dff5eab512dfc678b1cc5ee94d8ac5956a0885c64b6b26252b/numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7", size = 14557282, upload-time = "2025-11-16T22:49:30.964Z" },
+ { url = "https://files.pythonhosted.org/packages/65/fb/2b23769462b34398d9326081fad5655198fcf18966fcb1f1e49db44fbf31/numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4", size = 16897903, upload-time = "2025-11-16T22:49:34.191Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/14/085f4cf05fc3f1e8aa95e85404e984ffca9b2275a5dc2b1aae18a67538b8/numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e", size = 16341672, upload-time = "2025-11-16T22:49:37.2Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/3b/1f73994904142b2aa290449b3bb99772477b5fd94d787093e4f24f5af763/numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748", size = 18838896, upload-time = "2025-11-16T22:49:39.727Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/b9/cf6649b2124f288309ffc353070792caf42ad69047dcc60da85ee85fea58/numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c", size = 6563608, upload-time = "2025-11-16T22:49:42.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/44/9fe81ae1dcc29c531843852e2874080dc441338574ccc4306b39e2ff6e59/numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c", size = 13078442, upload-time = "2025-11-16T22:49:43.99Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/a7/f99a41553d2da82a20a2f22e93c94f928e4490bb447c9ff3c4ff230581d3/numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa", size = 10458555, upload-time = "2025-11-16T22:49:47.092Z" },
+ { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" },
+ { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" },
+ { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" },
+ { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" },
+ { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" },
+ { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" },
+ { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" },
+ { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" },
+ { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" },
+ { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" },
+ { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" },
+ { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" },
+ { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" },
+ { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" },
+ { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" },
+ { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" },
+ { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706, upload-time = "2025-11-16T22:51:19.558Z" },
+ { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507, upload-time = "2025-11-16T22:51:22.492Z" },
+ { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049, upload-time = "2025-11-16T22:51:25.171Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603, upload-time = "2025-11-16T22:51:27Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696, upload-time = "2025-11-16T22:51:29.402Z" },
+ { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350, upload-time = "2025-11-16T22:51:32.167Z" },
+ { url = "https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190, upload-time = "2025-11-16T22:51:35.403Z" },
+ { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 18536749, upload-time = "2025-11-16T22:51:39.698Z" },
+ { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432, upload-time = "2025-11-16T22:51:42.476Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388, upload-time = "2025-11-16T22:51:45.275Z" },
+ { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651, upload-time = "2025-11-16T22:51:47.749Z" },
+ { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503, upload-time = "2025-11-16T22:51:50.443Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612, upload-time = "2025-11-16T22:51:53.609Z" },
+ { url = "https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042, upload-time = "2025-11-16T22:51:56.213Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502, upload-time = "2025-11-16T22:51:58.584Z" },
+ { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962, upload-time = "2025-11-16T22:52:01.698Z" },
+ { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054, upload-time = "2025-11-16T22:52:04.267Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613, upload-time = "2025-11-16T22:52:08.651Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size = 18591147, upload-time = "2025-11-16T22:52:11.453Z" },
+ { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" },
+ { url = "https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/65/f9dea8e109371ade9c782b4e4756a82edf9d3366bca495d84d79859a0b79/numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310", size = 16910689, upload-time = "2025-11-16T22:52:23.247Z" },
+ { url = "https://files.pythonhosted.org/packages/00/4f/edb00032a8fb92ec0a679d3830368355da91a69cab6f3e9c21b64d0bb986/numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c", size = 12457053, upload-time = "2025-11-16T22:52:26.367Z" },
+ { url = "https://files.pythonhosted.org/packages/16/a4/e8a53b5abd500a63836a29ebe145fc1ab1f2eefe1cfe59276020373ae0aa/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18", size = 5285635, upload-time = "2025-11-16T22:52:29.266Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/2f/37eeb9014d9c8b3e9c55bc599c68263ca44fdbc12a93e45a21d1d56df737/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff", size = 6801770, upload-time = "2025-11-16T22:52:31.421Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/e4/68d2f474df2cb671b2b6c2986a02e520671295647dad82484cde80ca427b/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb", size = 14391768, upload-time = "2025-11-16T22:52:33.593Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/50/94ccd8a2b141cb50651fddd4f6a48874acb3c91c8f0842b08a6afc4b0b21/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7", size = 16729263, upload-time = "2025-11-16T22:52:36.369Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/ee/346fa473e666fe14c52fcdd19ec2424157290a032d4c41f98127bfb31ac7/numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425", size = 12967213, upload-time = "2025-11-16T22:52:39.38Z" },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
+[[package]]
+name = "pandas"
+version = "2.3.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+ { name = "python-dateutil" },
+ { name = "pytz" },
+ { name = "tzdata" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" },
+ { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
+ { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
+ { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
+ { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" },
+ { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" },
+ { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" },
+ { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" },
+ { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" },
+ { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" },
+ { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" },
+ { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" },
+ { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" },
+ { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" },
+ { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" },
+ { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" },
+ { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" },
+ { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" },
+ { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" },
+]
+
+[[package]]
+name = "parso"
+version = "0.8.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
+]
+
+[[package]]
+name = "pathspec"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
+]
+
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ptyprocess" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
+]
+
+[[package]]
+name = "pillow"
+version = "12.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0e/5a/a2f6773b64edb921a756eb0729068acad9fc5208a53f4a349396e9436721/pillow-12.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0fd00cac9c03256c8b2ff58f162ebcd2587ad3e1f2e397eab718c47e24d231cc", size = 5289798, upload-time = "2025-10-15T18:21:47.763Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/05/069b1f8a2e4b5a37493da6c5868531c3f77b85e716ad7a590ef87d58730d/pillow-12.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3475b96f5908b3b16c47533daaa87380c491357d197564e0ba34ae75c0f3257", size = 4650589, upload-time = "2025-10-15T18:21:49.515Z" },
+ { url = "https://files.pythonhosted.org/packages/61/e3/2c820d6e9a36432503ead175ae294f96861b07600a7156154a086ba7111a/pillow-12.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:110486b79f2d112cf6add83b28b627e369219388f64ef2f960fef9ebaf54c642", size = 6230472, upload-time = "2025-10-15T18:21:51.052Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/89/63427f51c64209c5e23d4d52071c8d0f21024d3a8a487737caaf614a5795/pillow-12.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5269cc1caeedb67e6f7269a42014f381f45e2e7cd42d834ede3c703a1d915fe3", size = 8033887, upload-time = "2025-10-15T18:21:52.604Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/1b/c9711318d4901093c15840f268ad649459cd81984c9ec9887756cca049a5/pillow-12.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa5129de4e174daccbc59d0a3b6d20eaf24417d59851c07ebb37aeb02947987c", size = 6343964, upload-time = "2025-10-15T18:21:54.619Z" },
+ { url = "https://files.pythonhosted.org/packages/41/1e/db9470f2d030b4995083044cd8738cdd1bf773106819f6d8ba12597d5352/pillow-12.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bee2a6db3a7242ea309aa7ee8e2780726fed67ff4e5b40169f2c940e7eb09227", size = 7034756, upload-time = "2025-10-15T18:21:56.151Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/b0/6177a8bdd5ee4ed87cba2de5a3cc1db55ffbbec6176784ce5bb75aa96798/pillow-12.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:90387104ee8400a7b4598253b4c406f8958f59fcf983a6cea2b50d59f7d63d0b", size = 6458075, upload-time = "2025-10-15T18:21:57.759Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/5e/61537aa6fa977922c6a03253a0e727e6e4a72381a80d63ad8eec350684f2/pillow-12.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc91a56697869546d1b8f0a3ff35224557ae7f881050e99f615e0119bf934b4e", size = 7125955, upload-time = "2025-10-15T18:21:59.372Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/3d/d5033539344ee3cbd9a4d69e12e63ca3a44a739eb2d4c8da350a3d38edd7/pillow-12.0.0-cp311-cp311-win32.whl", hash = "sha256:27f95b12453d165099c84f8a8bfdfd46b9e4bda9e0e4b65f0635430027f55739", size = 6298440, upload-time = "2025-10-15T18:22:00.982Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/42/aaca386de5cc8bd8a0254516957c1f265e3521c91515b16e286c662854c4/pillow-12.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b583dc9070312190192631373c6c8ed277254aa6e6084b74bdd0a6d3b221608e", size = 6999256, upload-time = "2025-10-15T18:22:02.617Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/f1/9197c9c2d5708b785f631a6dfbfa8eb3fb9672837cb92ae9af812c13b4ed/pillow-12.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:759de84a33be3b178a64c8ba28ad5c135900359e85fb662bc6e403ad4407791d", size = 2436025, upload-time = "2025-10-15T18:22:04.598Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" },
+ { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" },
+ { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" },
+ { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" },
+ { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" },
+ { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" },
+ { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" },
+ { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" },
+ { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" },
+ { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" },
+ { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" },
+ { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" },
+ { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" },
+ { url = "https://files.pythonhosted.org/packages/54/2a/9a8c6ba2c2c07b71bec92cf63e03370ca5e5f5c5b119b742bcc0cde3f9c5/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:beeae3f27f62308f1ddbcfb0690bf44b10732f2ef43758f169d5e9303165d3f9", size = 4045531, upload-time = "2025-10-15T18:23:10.121Z" },
+ { url = "https://files.pythonhosted.org/packages/84/54/836fdbf1bfb3d66a59f0189ff0b9f5f666cee09c6188309300df04ad71fa/pillow-12.0.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d4827615da15cd59784ce39d3388275ec093ae3ee8d7f0c089b76fa87af756c2", size = 4120554, upload-time = "2025-10-15T18:23:12.14Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/cd/16aec9f0da4793e98e6b54778a5fbce4f375c6646fe662e80600b8797379/pillow-12.0.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:3e42edad50b6909089750e65c91aa09aaf1e0a71310d383f11321b27c224ed8a", size = 3576812, upload-time = "2025-10-15T18:23:13.962Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/b7/13957fda356dc46339298b351cae0d327704986337c3c69bb54628c88155/pillow-12.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e5d8efac84c9afcb40914ab49ba063d94f5dbdf5066db4482c66a992f47a3a3b", size = 5252689, upload-time = "2025-10-15T18:23:15.562Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/f5/eae31a306341d8f331f43edb2e9122c7661b975433de5e447939ae61c5da/pillow-12.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:266cd5f2b63ff316d5a1bba46268e603c9caf5606d44f38c2873c380950576ad", size = 4650186, upload-time = "2025-10-15T18:23:17.379Z" },
+ { url = "https://files.pythonhosted.org/packages/86/62/2a88339aa40c4c77e79108facbd307d6091e2c0eb5b8d3cf4977cfca2fe6/pillow-12.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:58eea5ebe51504057dd95c5b77d21700b77615ab0243d8152793dc00eb4faf01", size = 6230308, upload-time = "2025-10-15T18:23:18.971Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/33/5425a8992bcb32d1cb9fa3dd39a89e613d09a22f2c8083b7bf43c455f760/pillow-12.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f13711b1a5ba512d647a0e4ba79280d3a9a045aaf7e0cc6fbe96b91d4cdf6b0c", size = 8039222, upload-time = "2025-10-15T18:23:20.909Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/61/3f5d3b35c5728f37953d3eec5b5f3e77111949523bd2dd7f31a851e50690/pillow-12.0.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6846bd2d116ff42cba6b646edf5bf61d37e5cbd256425fa089fee4ff5c07a99e", size = 6346657, upload-time = "2025-10-15T18:23:23.077Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/be/ee90a3d79271227e0f0a33c453531efd6ed14b2e708596ba5dd9be948da3/pillow-12.0.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c98fa880d695de164b4135a52fd2e9cd7b7c90a9d8ac5e9e443a24a95ef9248e", size = 7038482, upload-time = "2025-10-15T18:23:25.005Z" },
+ { url = "https://files.pythonhosted.org/packages/44/34/a16b6a4d1ad727de390e9bd9f19f5f669e079e5826ec0f329010ddea492f/pillow-12.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa3ed2a29a9e9d2d488b4da81dcb54720ac3104a20bf0bd273f1e4648aff5af9", size = 6461416, upload-time = "2025-10-15T18:23:27.009Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/39/1aa5850d2ade7d7ba9f54e4e4c17077244ff7a2d9e25998c38a29749eb3f/pillow-12.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d034140032870024e6b9892c692fe2968493790dd57208b2c37e3fb35f6df3ab", size = 7131584, upload-time = "2025-10-15T18:23:29.752Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/db/4fae862f8fad0167073a7733973bfa955f47e2cac3dc3e3e6257d10fab4a/pillow-12.0.0-cp314-cp314-win32.whl", hash = "sha256:1b1b133e6e16105f524a8dec491e0586d072948ce15c9b914e41cdadd209052b", size = 6400621, upload-time = "2025-10-15T18:23:32.06Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/24/b350c31543fb0107ab2599464d7e28e6f856027aadda995022e695313d94/pillow-12.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:8dc232e39d409036af549c86f24aed8273a40ffa459981146829a324e0848b4b", size = 7142916, upload-time = "2025-10-15T18:23:34.71Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/9b/0ba5a6fd9351793996ef7487c4fdbde8d3f5f75dbedc093bb598648fddf0/pillow-12.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:d52610d51e265a51518692045e372a4c363056130d922a7351429ac9f27e70b0", size = 2523836, upload-time = "2025-10-15T18:23:36.967Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/7a/ceee0840aebc579af529b523d530840338ecf63992395842e54edc805987/pillow-12.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1979f4566bb96c1e50a62d9831e2ea2d1211761e5662afc545fa766f996632f6", size = 5255092, upload-time = "2025-10-15T18:23:38.573Z" },
+ { url = "https://files.pythonhosted.org/packages/44/76/20776057b4bfd1aef4eeca992ebde0f53a4dce874f3ae693d0ec90a4f79b/pillow-12.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b2e4b27a6e15b04832fe9bf292b94b5ca156016bbc1ea9c2c20098a0320d6cf6", size = 4653158, upload-time = "2025-10-15T18:23:40.238Z" },
+ { url = "https://files.pythonhosted.org/packages/82/3f/d9ff92ace07be8836b4e7e87e6a4c7a8318d47c2f1463ffcf121fc57d9cb/pillow-12.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fb3096c30df99fd01c7bf8e544f392103d0795b9f98ba71a8054bcbf56b255f1", size = 6267882, upload-time = "2025-10-15T18:23:42.434Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/7a/4f7ff87f00d3ad33ba21af78bfcd2f032107710baf8280e3722ceec28cda/pillow-12.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7438839e9e053ef79f7112c881cef684013855016f928b168b81ed5835f3e75e", size = 8071001, upload-time = "2025-10-15T18:23:44.29Z" },
+ { url = "https://files.pythonhosted.org/packages/75/87/fcea108944a52dad8cca0715ae6247e271eb80459364a98518f1e4f480c1/pillow-12.0.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d5c411a8eaa2299322b647cd932586b1427367fd3184ffbb8f7a219ea2041ca", size = 6380146, upload-time = "2025-10-15T18:23:46.065Z" },
+ { url = "https://files.pythonhosted.org/packages/91/52/0d31b5e571ef5fd111d2978b84603fce26aba1b6092f28e941cb46570745/pillow-12.0.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7e091d464ac59d2c7ad8e7e08105eaf9dafbc3883fd7265ffccc2baad6ac925", size = 7067344, upload-time = "2025-10-15T18:23:47.898Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/f4/2dd3d721f875f928d48e83bb30a434dee75a2531bca839bb996bb0aa5a91/pillow-12.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:792a2c0be4dcc18af9d4a2dfd8a11a17d5e25274a1062b0ec1c2d79c76f3e7f8", size = 6491864, upload-time = "2025-10-15T18:23:49.607Z" },
+ { url = "https://files.pythonhosted.org/packages/30/4b/667dfcf3d61fc309ba5a15b141845cece5915e39b99c1ceab0f34bf1d124/pillow-12.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:afbefa430092f71a9593a99ab6a4e7538bc9eabbf7bf94f91510d3503943edc4", size = 7158911, upload-time = "2025-10-15T18:23:51.351Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/2f/16cabcc6426c32218ace36bf0d55955e813f2958afddbf1d391849fee9d1/pillow-12.0.0-cp314-cp314t-win32.whl", hash = "sha256:3830c769decf88f1289680a59d4f4c46c72573446352e2befec9a8512104fa52", size = 6408045, upload-time = "2025-10-15T18:23:53.177Z" },
+ { url = "https://files.pythonhosted.org/packages/35/73/e29aa0c9c666cf787628d3f0dcf379f4791fba79f4936d02f8b37165bdf8/pillow-12.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:905b0365b210c73afb0ebe9101a32572152dfd1c144c7e28968a331b9217b94a", size = 7148282, upload-time = "2025-10-15T18:23:55.316Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/70/6b41bdcddf541b437bbb9f47f94d2db5d9ddef6c37ccab8c9107743748a4/pillow-12.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:99353a06902c2e43b43e8ff74ee65a7d90307d82370604746738a1e0661ccca7", size = 2525630, upload-time = "2025-10-15T18:23:57.149Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/b3/582327e6c9f86d037b63beebe981425d6811104cb443e8193824ef1a2f27/pillow-12.0.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b22bd8c974942477156be55a768f7aa37c46904c175be4e158b6a86e3a6b7ca8", size = 5215068, upload-time = "2025-10-15T18:23:59.594Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/d6/67748211d119f3b6540baf90f92fae73ae51d5217b171b0e8b5f7e5d558f/pillow-12.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:805ebf596939e48dbb2e4922a1d3852cfc25c38160751ce02da93058b48d252a", size = 4614994, upload-time = "2025-10-15T18:24:01.669Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/e1/f8281e5d844c41872b273b9f2c34a4bf64ca08905668c8ae730eedc7c9fa/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae81479f77420d217def5f54b5b9d279804d17e982e0f2fa19b1d1e14ab5197", size = 5246639, upload-time = "2025-10-15T18:24:03.403Z" },
+ { url = "https://files.pythonhosted.org/packages/94/5a/0d8ab8ffe8a102ff5df60d0de5af309015163bf710c7bb3e8311dd3b3ad0/pillow-12.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aeaefa96c768fc66818730b952a862235d68825c178f1b3ffd4efd7ad2edcb7c", size = 6986839, upload-time = "2025-10-15T18:24:05.344Z" },
+ { url = "https://files.pythonhosted.org/packages/20/2e/3434380e8110b76cd9eb00a363c484b050f949b4bbe84ba770bb8508a02c/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f2d0abef9e4e2f349305a4f8cc784a8a6c2f58a8c4892eea13b10a943bd26e", size = 5313505, upload-time = "2025-10-15T18:24:07.137Z" },
+ { url = "https://files.pythonhosted.org/packages/57/ca/5a9d38900d9d74785141d6580950fe705de68af735ff6e727cb911b64740/pillow-12.0.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bdee52571a343d721fb2eb3b090a82d959ff37fc631e3f70422e0c2e029f3e76", size = 5963654, upload-time = "2025-10-15T18:24:09.579Z" },
+ { url = "https://files.pythonhosted.org/packages/95/7e/f896623c3c635a90537ac093c6a618ebe1a90d87206e42309cb5d98a1b9e/pillow-12.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b290fd8aa38422444d4b50d579de197557f182ef1068b75f5aa8558638b8d0a5", size = 6997850, upload-time = "2025-10-15T18:24:11.495Z" },
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
+]
+
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
+[[package]]
+name = "pre-commit"
+version = "4.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cfgv" },
+ { name = "identify" },
+ { name = "nodeenv" },
+ { name = "pyyaml" },
+ { name = "virtualenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f4/9b/6a4ffb4ed980519da959e1cf3122fc6cb41211daa58dbae1c73c0e519a37/pre_commit-4.5.0.tar.gz", hash = "sha256:dc5a065e932b19fc1d4c653c6939068fe54325af8e741e74e88db4d28a4dd66b", size = 198428, upload-time = "2025-11-22T21:02:42.304Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5d/c4/b2d28e9d2edf4f1713eb3c29307f1a63f3d67cf09bdda29715a36a68921a/pre_commit-4.5.0-py2.py3-none-any.whl", hash = "sha256:25e2ce09595174d9c97860a95609f9f852c0614ba602de3561e267547f2335e1", size = 226429, upload-time = "2025-11-22T21:02:40.836Z" },
+]
+
+[[package]]
+name = "prompt-toolkit"
+version = "3.0.52"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "wcwidth" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
+]
+
+[[package]]
+name = "psutil"
+version = "7.1.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
+ { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
+ { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
+ { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
+ { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
+ { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
+ { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
+ { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
+]
+
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
+]
+
+[[package]]
+name = "pycodestyle"
+version = "2.14.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" },
+]
+
+[[package]]
+name = "pycparser"
+version = "2.23"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
+]
+
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
+[[package]]
+name = "pyogrio"
+version = "0.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "numpy" },
+ { name = "packaging" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/49/d4/12f86b1ed09721363da4c09622464b604c851a9223fc0c6b393fb2012208/pyogrio-0.12.1.tar.gz", hash = "sha256:e548ab705bb3e5383693717de1e6c76da97f3762ab92522cb310f93128a75ff1", size = 303289, upload-time = "2025-11-28T19:04:53.341Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/02/46/b2c2dcdfd88759b56f103365905fffb85e8b08c1db1ec7c8f8b4c4c26016/pyogrio-0.12.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:01b322dac2a258d24b024d1028dcaa03c9bb6d9c3988b86d298a64873d10dc65", size = 23670744, upload-time = "2025-11-28T19:03:11.299Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/21/b69f1bc51d805c00dd7c484a18e1fd2e75b41da1d9f5b8591d7d9d4a7d2f/pyogrio-0.12.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:e10087abcbd6b7e8212560a7002984e5078ac7b3a969ddc2c9929044dbb0d403", size = 25246184, upload-time = "2025-11-28T19:03:13.997Z" },
+ { url = "https://files.pythonhosted.org/packages/19/8c/b6aae08e8fcc4f2a903da5f6bd8f888d2b6d7290e54dde5abe15b4cca8df/pyogrio-0.12.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f6c621972b09fd81a32317e742c69ff4a7763a803da211361a78317f9577765", size = 31434449, upload-time = "2025-11-28T19:03:16.777Z" },
+ { url = "https://files.pythonhosted.org/packages/70/f9/9538fa893c29a3fdfeddf3b4c9f8db77f2d4134bc766587929fec8405ebf/pyogrio-0.12.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c38253427b688464caad5316d4ebcec116b5e13f1f02cc4e3588502f136ca1b4", size = 30987586, upload-time = "2025-11-28T19:03:19.586Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a4/0aef5837b4e11840f501e48e01c31242838476c4f4aff9c05e228a083982/pyogrio-0.12.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5f47787251de7ce13cc06038da93a1214dc283cbccf816be6e03c080358226c8", size = 32534386, upload-time = "2025-11-28T19:03:22.292Z" },
+ { url = "https://files.pythonhosted.org/packages/34/97/e8f2ed8a339152b86f8403c258ae5d5f23ab32d690eeb0545bb3473d0c69/pyogrio-0.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:c1d756cf2da4cdf5609779f260d1e1e89be023184225855d6f3dcd33bbe17cb0", size = 22941718, upload-time = "2025-11-28T19:03:24.82Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/e0/656b6536549d41b5aec57e0deca1f269b4f17532f0636836f587e581603a/pyogrio-0.12.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:7a0d5ca39184030aec4cde30f4258f75b227a854530d2659babc8189d76e657d", size = 23661857, upload-time = "2025-11-28T19:03:27.744Z" },
+ { url = "https://files.pythonhosted.org/packages/14/78/313259e40da728bdb60106ffdc7ea8224d164498cb838ecb79b634aab967/pyogrio-0.12.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:feaff42bbe8087ca0b30e33b09d1ce049ca55fe83ad83db1139ef37d1d04f30c", size = 25237106, upload-time = "2025-11-28T19:03:30.018Z" },
+ { url = "https://files.pythonhosted.org/packages/8f/ca/5368571a8b00b941ccfbe6ea29a5566aaffd45d4eb1553b956f7755af43e/pyogrio-0.12.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:81096a5139532de5a8003ef02b41d5d2444cb382a9aecd1165b447eb549180d3", size = 31417048, upload-time = "2025-11-28T19:03:32.572Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/85/6eeb875f27bf498d657eb5dab9f58e4c48b36c9037122787abee9a1ba4ba/pyogrio-0.12.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:41b78863f782f7a113ed0d36a5dc74d59735bd3a82af53510899bb02a18b06bb", size = 30952115, upload-time = "2025-11-28T19:03:35.332Z" },
+ { url = "https://files.pythonhosted.org/packages/36/f7/cf8bec9024625947e1a71441906f60a5fa6f9e4c441c4428037e73b1fcc8/pyogrio-0.12.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:8b65be8c4258b27cc8f919b21929cecdadda4c353e3637fa30850339ef4d15c5", size = 32537246, upload-time = "2025-11-28T19:03:37.969Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/10/7c9f5e428273574e69f217eba3a6c0c42936188ad4dcd9e2c41ebb711188/pyogrio-0.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:1291b866c2c81d991bda15021b08b3621709b40ee3a85689229929e9465788bf", size = 22933980, upload-time = "2025-11-28T19:03:41.047Z" },
+ { url = "https://files.pythonhosted.org/packages/be/56/f56e79f71b84aa9bea25fdde39fab3846841bd7926be96f623eb7253b7e1/pyogrio-0.12.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:ec0e47a5a704e575092b2fd5c83fa0472a1d421e590f94093eb837bb0a11125d", size = 23658483, upload-time = "2025-11-28T19:03:43.567Z" },
+ { url = "https://files.pythonhosted.org/packages/66/ac/5559f8a35d58a16cbb2dd7602dd11936ff8796d8c9bf789f14da88764ec3/pyogrio-0.12.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:b4c888fc08f388be4dd99dfca5e84a5cdc5994deeec0230cc45144d3460e2b21", size = 25232737, upload-time = "2025-11-28T19:03:45.92Z" },
+ { url = "https://files.pythonhosted.org/packages/59/58/925f1c129ddd7cbba8dea4e7609797cea7a76dbc863ac9afd318a679c4b9/pyogrio-0.12.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:73a88436f9962750d782853727897ac2722cac5900d920e39fab3e56d7a6a7f1", size = 31377986, upload-time = "2025-11-28T19:03:48.495Z" },
+ { url = "https://files.pythonhosted.org/packages/18/5f/c87034e92847b1844d0e8492a6a8e3301147d32c5e57909397ce64dbedf5/pyogrio-0.12.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b5d248a0d59fe9bbf9a35690b70004c67830ee0ebe7d4f7bb8ffd8659f684b3a", size = 30915791, upload-time = "2025-11-28T19:03:51.267Z" },
+ { url = "https://files.pythonhosted.org/packages/46/35/b874f79d03e9f900012cf609f7fff97b77164f2e14ee5aac282f8a999c1b/pyogrio-0.12.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0622bc1a186421547660271083079b38d42e6f868802936d8538c0b379f1ab6b", size = 32499754, upload-time = "2025-11-28T19:03:58.776Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/c4/705678c9c4200130290b3a104b45c0cc10aaa48fcef3b2585b34e34ab3e1/pyogrio-0.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:207bd60c7ffbcea84584596e3637653aa7095e9ee20fa408f90c7f9460392613", size = 22933945, upload-time = "2025-11-28T19:04:01.551Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/e0/d92d4944001330bc87742d43f112d63d12fc89378b6187e62ff3fc1e8e85/pyogrio-0.12.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:1511b39a283fa27cda906cd187a791578942a87a40b6a06697d9b43bb8ac80b0", size = 23692697, upload-time = "2025-11-28T19:04:04.208Z" },
+ { url = "https://files.pythonhosted.org/packages/e5/d7/40acbe06d1b1140e3bb27b79e9163776469c1dc785f1be7d9a7fc7b95c87/pyogrio-0.12.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:e486cd6aa9ea8a15394a5f84e019d61ec18f257eeeb642348bd68c3d1e57280b", size = 25258083, upload-time = "2025-11-28T19:04:07.121Z" },
+ { url = "https://files.pythonhosted.org/packages/87/a1/39fefd9cddd95986700524f43d3093b4350f6e4fc200623c3838424a5080/pyogrio-0.12.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3f1a19f63bfd1d3042e45f37ad1d6598123a5a604b6c4ba3f38b419273486cd", size = 31368995, upload-time = "2025-11-28T19:04:09.88Z" },
+ { url = "https://files.pythonhosted.org/packages/18/d7/da88c566e67d741a03851eb8d01358949d52e0b0fc2cd953582dc6d89ff8/pyogrio-0.12.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:f3dcc59b3316b8a0f59346bcc638a4d69997864a4d21da839192f50c4c92369a", size = 31035589, upload-time = "2025-11-28T19:04:12.993Z" },
+ { url = "https://files.pythonhosted.org/packages/11/ac/8f0199f0d31b8ddbc4b4ea1918df8070fdf3e0a63100b898633ec9396224/pyogrio-0.12.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:a0643e041dee3e8e038fce69f52a915ecb486e6d7b674c0f9919f3c9e9629689", size = 32487973, upload-time = "2025-11-28T19:04:16.103Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/64/8541a27e9635a335835d234dfaeb19d6c26097fd88224eda7791f83ca98d/pyogrio-0.12.1-cp313-cp313t-win_amd64.whl", hash = "sha256:5881017f29e110d3613819667657844d8e961b747f2d35cf92f273c27af6d068", size = 22987374, upload-time = "2025-11-28T19:04:18.91Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/6f/b4d5e285e08c0c60bcc23b50d73038ddc7335d8de79cc25678cd486a3db0/pyogrio-0.12.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:5a1b0453d1c9e7b03715dd57296c8f3790acb8b50d7e3b5844b3074a18f50709", size = 23660673, upload-time = "2025-11-28T19:04:21.662Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/75/4b29e71489c5551aa1a1c5ca8c5160a60203c94f2f68c87c0e3614d58965/pyogrio-0.12.1-cp314-cp314-macosx_12_0_x86_64.whl", hash = "sha256:e7ee560422239dd09ca7f8284cc8483a8919c30d25f3049bb0249bff4c38dec4", size = 25232194, upload-time = "2025-11-28T19:04:23.975Z" },
+ { url = "https://files.pythonhosted.org/packages/89/6e/e9929d2261a07c36301983de2767bcde90d441ab5bf1d767ce56dd07f8b4/pyogrio-0.12.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:648c6f7f5f214d30e6cf493b4af1d59782907ac068af9119ca35f18153d6865a", size = 31336936, upload-time = "2025-11-28T19:04:26.594Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/9e/c59941d734ed936d4e5c89b4b99cb5541307cc42b3fd466ee78a1850c177/pyogrio-0.12.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:58042584f3fd4cabb0f55d26c1405053f656be8a5c266c38140316a1e981aca0", size = 30902210, upload-time = "2025-11-28T19:04:29.143Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/68/cc07320a63f9c2586e60bf11d148b00e12d0e707673bffe609bbdcb7e754/pyogrio-0.12.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b438e38e4ccbaedaa5cb5824ff5de5539315d9b2fde6547c1e816576924ee8ca", size = 32461674, upload-time = "2025-11-28T19:04:31.792Z" },
+ { url = "https://files.pythonhosted.org/packages/13/bc/e4522f429c45a3b6ad28185849dd76e5c8718b780883c4795e7ee41841ae/pyogrio-0.12.1-cp314-cp314-win_amd64.whl", hash = "sha256:f1d8d8a2fea3781dc2a05982c050259261ebc0f6c5e03732d6d79d582adf9363", size = 23550575, upload-time = "2025-11-28T19:04:34.556Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/ac/34f0664d0e391994a7b68529ae07a96432b2b4926dbac173ddc4ec94d310/pyogrio-0.12.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9fe7286946f35a73e6370dc5855bc7a5e8e7babf9e4a8bad7a3279a1d94c7ea9", size = 23694285, upload-time = "2025-11-28T19:04:37.833Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/93/873255529faff1da09d0b27287e85ec805a318c60c0c74fd7df77f94e557/pyogrio-0.12.1-cp314-cp314t-macosx_12_0_x86_64.whl", hash = "sha256:2c50345b382f1be801d654ec22c70ee974d6057d4ba7afe984b55f2192bc94ee", size = 25259825, upload-time = "2025-11-28T19:04:40.125Z" },
+ { url = "https://files.pythonhosted.org/packages/27/95/4d4c3644695d99c6fa0b0b42f0d6266ae9dfaf64478a3371eaac950bdd02/pyogrio-0.12.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0db95765ac0ca935c7fe579e29451294e3ab19c317b0c59c31fbe92a69155e0", size = 31371995, upload-time = "2025-11-28T19:04:42.736Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/6f/71f6bcca8754c8bf55a4b7153c61c91f8ac5ba992568e9fa3e54a0ee76fd/pyogrio-0.12.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:fc882779075982b93064b3bf3d8642514a6df00d9dd752493b104817072cfb01", size = 31035498, upload-time = "2025-11-28T19:04:45.79Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/47/75c1aa165a988347317afab9b938a01ad25dbca559b582ea34473703dc38/pyogrio-0.12.1-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:806f620e0c54b54dbdd65e9b6368d24f344cda84c9343364b40a57eb3e1c4dca", size = 32496390, upload-time = "2025-11-28T19:04:48.786Z" },
+ { url = "https://files.pythonhosted.org/packages/31/93/4641dc5d952f6bdb71dabad2c50e3f8a5d58396cdea6ff8f8a08bfd4f4a6/pyogrio-0.12.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5399f66730978d8852ef5f44dbafa0f738e7f28f4f784349f36830b69a9d2134", size = 23620996, upload-time = "2025-11-28T19:04:51.132Z" },
+]
+
+[[package]]
+name = "pyparsing"
+version = "3.2.5"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" },
+]
+
+[[package]]
+name = "pyproj"
+version = "3.7.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/04/90/67bd7260b4ea9b8b20b4f58afef6c223ecb3abf368eb4ec5bc2cdef81b49/pyproj-3.7.2.tar.gz", hash = "sha256:39a0cf1ecc7e282d1d30f36594ebd55c9fae1fda8a2622cee5d100430628f88c", size = 226279, upload-time = "2025-08-14T12:05:42.18Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a6/bd/f205552cd1713b08f93b09e39a3ec99edef0b3ebbbca67b486fdf1abe2de/pyproj-3.7.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2514d61f24c4e0bb9913e2c51487ecdaeca5f8748d8313c933693416ca41d4d5", size = 6227022, upload-time = "2025-08-14T12:03:51.474Z" },
+ { url = "https://files.pythonhosted.org/packages/75/4c/9a937e659b8b418ab573c6d340d27e68716928953273e0837e7922fcac34/pyproj-3.7.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:8693ca3892d82e70de077701ee76dd13d7bca4ae1c9d1e739d72004df015923a", size = 4625810, upload-time = "2025-08-14T12:03:53.808Z" },
+ { url = "https://files.pythonhosted.org/packages/c0/7d/a9f41e814dc4d1dc54e95b2ccaf0b3ebe3eb18b1740df05fe334724c3d89/pyproj-3.7.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5e26484d80fea56273ed1555abaea161e9661d81a6c07815d54b8e883d4ceb25", size = 9638694, upload-time = "2025-08-14T12:03:55.669Z" },
+ { url = "https://files.pythonhosted.org/packages/ad/ab/9bdb4a6216b712a1f9aab1c0fcbee5d3726f34a366f29c3e8c08a78d6b70/pyproj-3.7.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:281cb92847814e8018010c48b4069ff858a30236638631c1a91dd7bfa68f8a8a", size = 9493977, upload-time = "2025-08-14T12:03:57.937Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/db/2db75b1b6190f1137b1c4e8ef6a22e1c338e46320f6329bfac819143e063/pyproj-3.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9c8577f0b7bb09118ec2e57e3babdc977127dd66326d6c5d755c76b063e6d9dc", size = 10841151, upload-time = "2025-08-14T12:04:00.271Z" },
+ { url = "https://files.pythonhosted.org/packages/89/f7/989643394ba23a286e9b7b3f09981496172f9e0d4512457ffea7dc47ffc7/pyproj-3.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a23f59904fac3a5e7364b3aa44d288234af267ca041adb2c2b14a903cd5d3ac5", size = 10751585, upload-time = "2025-08-14T12:04:02.228Z" },
+ { url = "https://files.pythonhosted.org/packages/53/6d/ad928fe975a6c14a093c92e6a319ca18f479f3336bb353a740bdba335681/pyproj-3.7.2-cp311-cp311-win32.whl", hash = "sha256:f2af4ed34b2cf3e031a2d85b067a3ecbd38df073c567e04b52fa7a0202afde8a", size = 5908533, upload-time = "2025-08-14T12:04:04.821Z" },
+ { url = "https://files.pythonhosted.org/packages/79/e0/b95584605cec9ed50b7ebaf7975d1c4ddeec5a86b7a20554ed8b60042bd7/pyproj-3.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:0b7cb633565129677b2a183c4d807c727d1c736fcb0568a12299383056e67433", size = 6320742, upload-time = "2025-08-14T12:04:06.357Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/4d/536e8f93bca808175c2d0a5ac9fdf69b960d8ab6b14f25030dccb07464d7/pyproj-3.7.2-cp311-cp311-win_arm64.whl", hash = "sha256:38b08d85e3a38e455625b80e9eb9f78027c8e2649a21dec4df1f9c3525460c71", size = 6245772, upload-time = "2025-08-14T12:04:08.365Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/ab/9893ea9fb066be70ed9074ae543914a618c131ed8dff2da1e08b3a4df4db/pyproj-3.7.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:0a9bb26a6356fb5b033433a6d1b4542158fb71e3c51de49b4c318a1dff3aeaab", size = 6219832, upload-time = "2025-08-14T12:04:10.264Z" },
+ { url = "https://files.pythonhosted.org/packages/53/78/4c64199146eed7184eb0e85bedec60a4aa8853b6ffe1ab1f3a8b962e70a0/pyproj-3.7.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:567caa03021178861fad27fabde87500ec6d2ee173dd32f3e2d9871e40eebd68", size = 4620650, upload-time = "2025-08-14T12:04:11.978Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/ac/14a78d17943898a93ef4f8c6a9d4169911c994e3161e54a7cedeba9d8dde/pyproj-3.7.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c203101d1dc3c038a56cff0447acc515dd29d6e14811406ac539c21eed422b2a", size = 9667087, upload-time = "2025-08-14T12:04:13.964Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/be/212882c450bba74fc8d7d35cbd57e4af84792f0a56194819d98106b075af/pyproj-3.7.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:1edc34266c0c23ced85f95a1ee8b47c9035eae6aca5b6b340327250e8e281630", size = 9552797, upload-time = "2025-08-14T12:04:16.624Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/c0/c0f25c87b5d2a8686341c53c1792a222a480d6c9caf60311fec12c99ec26/pyproj-3.7.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa9f26c21bc0e2dc3d224cb1eb4020cf23e76af179a7c66fea49b828611e4260", size = 10837036, upload-time = "2025-08-14T12:04:18.733Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/37/5cbd6772addde2090c91113332623a86e8c7d583eccb2ad02ea634c4a89f/pyproj-3.7.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9428b318530625cb389b9ddc9c51251e172808a4af79b82809376daaeabe5e9", size = 10775952, upload-time = "2025-08-14T12:04:20.709Z" },
+ { url = "https://files.pythonhosted.org/packages/69/a1/dc250e3cf83eb4b3b9a2cf86fdb5e25288bd40037ae449695550f9e96b2f/pyproj-3.7.2-cp312-cp312-win32.whl", hash = "sha256:b3d99ed57d319da042f175f4554fc7038aa4bcecc4ac89e217e350346b742c9d", size = 5898872, upload-time = "2025-08-14T12:04:22.485Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/a6/6fe724b72b70f2b00152d77282e14964d60ab092ec225e67c196c9b463e5/pyproj-3.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:11614a054cd86a2ed968a657d00987a86eeb91fdcbd9ad3310478685dc14a128", size = 6312176, upload-time = "2025-08-14T12:04:24.736Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/68/915cc32c02a91e76d02c8f55d5a138d6ef9e47a0d96d259df98f4842e558/pyproj-3.7.2-cp312-cp312-win_arm64.whl", hash = "sha256:509a146d1398bafe4f53273398c3bb0b4732535065fa995270e52a9d3676bca3", size = 6233452, upload-time = "2025-08-14T12:04:27.287Z" },
+ { url = "https://files.pythonhosted.org/packages/be/14/faf1b90d267cea68d7e70662e7f88cefdb1bc890bd596c74b959e0517a72/pyproj-3.7.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:19466e529b1b15eeefdf8ff26b06fa745856c044f2f77bf0edbae94078c1dfa1", size = 6214580, upload-time = "2025-08-14T12:04:28.804Z" },
+ { url = "https://files.pythonhosted.org/packages/35/48/da9a45b184d375f62667f62eba0ca68569b0bd980a0bb7ffcc1d50440520/pyproj-3.7.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:c79b9b84c4a626c5dc324c0d666be0bfcebd99f7538d66e8898c2444221b3da7", size = 4615388, upload-time = "2025-08-14T12:04:30.553Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/e7/d2b459a4a64bca328b712c1b544e109df88e5c800f7c143cfbc404d39bfb/pyproj-3.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ceecf374cacca317bc09e165db38ac548ee3cad07c3609442bd70311c59c21aa", size = 9628455, upload-time = "2025-08-14T12:04:32.435Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/85/c2b1706e51942de19076eff082f8495e57d5151364e78b5bef4af4a1d94a/pyproj-3.7.2-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5141a538ffdbe4bfd157421828bb2e07123a90a7a2d6f30fa1462abcfb5ce681", size = 9514269, upload-time = "2025-08-14T12:04:34.599Z" },
+ { url = "https://files.pythonhosted.org/packages/34/38/07a9b89ae7467872f9a476883a5bad9e4f4d1219d31060f0f2b282276cbe/pyproj-3.7.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f000841e98ea99acbb7b8ca168d67773b0191de95187228a16110245c5d954d5", size = 10808437, upload-time = "2025-08-14T12:04:36.485Z" },
+ { url = "https://files.pythonhosted.org/packages/12/56/fda1daeabbd39dec5b07f67233d09f31facb762587b498e6fc4572be9837/pyproj-3.7.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8115faf2597f281a42ab608ceac346b4eb1383d3b45ab474fd37341c4bf82a67", size = 10745540, upload-time = "2025-08-14T12:04:38.568Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/90/c793182cbba65a39a11db2ac6b479fe76c59e6509ae75e5744c344a0da9d/pyproj-3.7.2-cp313-cp313-win32.whl", hash = "sha256:f18c0579dd6be00b970cb1a6719197fceecc407515bab37da0066f0184aafdf3", size = 5896506, upload-time = "2025-08-14T12:04:41.059Z" },
+ { url = "https://files.pythonhosted.org/packages/be/0f/747974129cf0d800906f81cd25efd098c96509026e454d4b66868779ab04/pyproj-3.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:bb41c29d5f60854b1075853fe80c58950b398d4ebb404eb532536ac8d2834ed7", size = 6310195, upload-time = "2025-08-14T12:04:42.974Z" },
+ { url = "https://files.pythonhosted.org/packages/82/64/fc7598a53172c4931ec6edf5228280663063150625d3f6423b4c20f9daff/pyproj-3.7.2-cp313-cp313-win_arm64.whl", hash = "sha256:2b617d573be4118c11cd96b8891a0b7f65778fa7733ed8ecdb297a447d439100", size = 6230748, upload-time = "2025-08-14T12:04:44.491Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/f0/611dd5cddb0d277f94b7af12981f56e1441bf8d22695065d4f0df5218498/pyproj-3.7.2-cp313-cp313t-macosx_13_0_x86_64.whl", hash = "sha256:d27b48f0e81beeaa2b4d60c516c3a1cfbb0c7ff6ef71256d8e9c07792f735279", size = 6241729, upload-time = "2025-08-14T12:04:46.274Z" },
+ { url = "https://files.pythonhosted.org/packages/15/93/40bd4a6c523ff9965e480870611aed7eda5aa2c6128c6537345a2b77b542/pyproj-3.7.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:55a3610d75023c7b1c6e583e48ef8f62918e85a2ae81300569d9f104d6684bb6", size = 4652497, upload-time = "2025-08-14T12:04:48.203Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/ae/7150ead53c117880b35e0d37960d3138fe640a235feb9605cb9386f50bb0/pyproj-3.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8d7349182fa622696787cc9e195508d2a41a64765da9b8a6bee846702b9e6220", size = 9942610, upload-time = "2025-08-14T12:04:49.652Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/17/7a4a7eafecf2b46ab64e5c08176c20ceb5844b503eaa551bf12ccac77322/pyproj-3.7.2-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:d230b186eb876ed4f29a7c5ee310144c3a0e44e89e55f65fb3607e13f6db337c", size = 9692390, upload-time = "2025-08-14T12:04:51.731Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/55/ae18f040f6410f0ea547a21ada7ef3e26e6c82befa125b303b02759c0e9d/pyproj-3.7.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:237499c7862c578d0369e2b8ac56eec550e391a025ff70e2af8417139dabb41c", size = 11047596, upload-time = "2025-08-14T12:04:53.748Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/2e/d3fff4d2909473f26ae799f9dda04caa322c417a51ff3b25763f7d03b233/pyproj-3.7.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8c225f5978abd506fd9a78eaaf794435e823c9156091cabaab5374efb29d7f69", size = 10896975, upload-time = "2025-08-14T12:04:55.875Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/bc/8fc7d3963d87057b7b51ebe68c1e7c51c23129eee5072ba6b86558544a46/pyproj-3.7.2-cp313-cp313t-win32.whl", hash = "sha256:2da731876d27639ff9d2d81c151f6ab90a1546455fabd93368e753047be344a2", size = 5953057, upload-time = "2025-08-14T12:04:58.466Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/27/ea9809966cc47d2d51e6d5ae631ea895f7c7c7b9b3c29718f900a8f7d197/pyproj-3.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f54d91ae18dd23b6c0ab48126d446820e725419da10617d86a1b69ada6d881d3", size = 6375414, upload-time = "2025-08-14T12:04:59.861Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/f8/1ef0129fba9a555c658e22af68989f35e7ba7b9136f25758809efec0cd6e/pyproj-3.7.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fc52ba896cfc3214dc9f9ca3c0677a623e8fdd096b257c14a31e719d21ff3fdd", size = 6262501, upload-time = "2025-08-14T12:05:01.39Z" },
+ { url = "https://files.pythonhosted.org/packages/42/17/c2b050d3f5b71b6edd0d96ae16c990fdc42a5f1366464a5c2772146de33a/pyproj-3.7.2-cp314-cp314-macosx_13_0_x86_64.whl", hash = "sha256:2aaa328605ace41db050d06bac1adc11f01b71fe95c18661497763116c3a0f02", size = 6214541, upload-time = "2025-08-14T12:05:03.166Z" },
+ { url = "https://files.pythonhosted.org/packages/03/68/68ada9c8aea96ded09a66cfd9bf87aa6db8c2edebe93f5bf9b66b0143fbc/pyproj-3.7.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:35dccbce8201313c596a970fde90e33605248b66272595c061b511c8100ccc08", size = 4617456, upload-time = "2025-08-14T12:05:04.563Z" },
+ { url = "https://files.pythonhosted.org/packages/81/e4/4c50ceca7d0e937977866b02cb64e6ccf4df979a5871e521f9e255df6073/pyproj-3.7.2-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:25b0b7cb0042444c29a164b993c45c1b8013d6c48baa61dc1160d834a277e83b", size = 9615590, upload-time = "2025-08-14T12:05:06.094Z" },
+ { url = "https://files.pythonhosted.org/packages/05/1e/ada6fb15a1d75b5bd9b554355a69a798c55a7dcc93b8d41596265c1772e3/pyproj-3.7.2-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:85def3a6388e9ba51f964619aa002a9d2098e77c6454ff47773bb68871024281", size = 9474960, upload-time = "2025-08-14T12:05:07.973Z" },
+ { url = "https://files.pythonhosted.org/packages/51/07/9d48ad0a8db36e16f842f2c8a694c1d9d7dcf9137264846bef77585a71f3/pyproj-3.7.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b1bccefec3875ab81eabf49059e2b2ea77362c178b66fd3528c3e4df242f1516", size = 10799478, upload-time = "2025-08-14T12:05:14.102Z" },
+ { url = "https://files.pythonhosted.org/packages/85/cf/2f812b529079f72f51ff2d6456b7fef06c01735e5cfd62d54ffb2b548028/pyproj-3.7.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d5371ca114d6990b675247355a801925814eca53e6c4b2f1b5c0a956336ee36e", size = 10710030, upload-time = "2025-08-14T12:05:16.317Z" },
+ { url = "https://files.pythonhosted.org/packages/99/9b/4626a19e1f03eba4c0e77b91a6cf0f73aa9cb5d51a22ee385c22812bcc2c/pyproj-3.7.2-cp314-cp314-win32.whl", hash = "sha256:77f066626030f41be543274f5ac79f2a511fe89860ecd0914f22131b40a0ec25", size = 5991181, upload-time = "2025-08-14T12:05:19.492Z" },
+ { url = "https://files.pythonhosted.org/packages/04/b2/5a6610554306a83a563080c2cf2c57565563eadd280e15388efa00fb5b33/pyproj-3.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:5a964da1696b8522806f4276ab04ccfff8f9eb95133a92a25900697609d40112", size = 6434721, upload-time = "2025-08-14T12:05:21.022Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/ce/6c910ea2e1c74ef673c5d48c482564b8a7824a44c4e35cca2e765b68cfcc/pyproj-3.7.2-cp314-cp314-win_arm64.whl", hash = "sha256:e258ab4dbd3cf627809067c0ba8f9884ea76c8e5999d039fb37a1619c6c3e1f6", size = 6363821, upload-time = "2025-08-14T12:05:22.627Z" },
+ { url = "https://files.pythonhosted.org/packages/e4/e4/5532f6f7491812ba782a2177fe9de73fd8e2912b59f46a1d056b84b9b8f2/pyproj-3.7.2-cp314-cp314t-macosx_13_0_x86_64.whl", hash = "sha256:bbbac2f930c6d266f70ec75df35ef851d96fdb3701c674f42fd23a9314573b37", size = 6241773, upload-time = "2025-08-14T12:05:24.577Z" },
+ { url = "https://files.pythonhosted.org/packages/20/1f/0938c3f2bbbef1789132d1726d9b0e662f10cfc22522743937f421ad664e/pyproj-3.7.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:b7544e0a3d6339dc9151e9c8f3ea62a936ab7cc446a806ec448bbe86aebb979b", size = 4652537, upload-time = "2025-08-14T12:05:26.391Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/a8/488b1ed47d25972f33874f91f09ca8f2227902f05f63a2b80dc73e7b1c97/pyproj-3.7.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:f7f5133dca4c703e8acadf6f30bc567d39a42c6af321e7f81975c2518f3ed357", size = 9940864, upload-time = "2025-08-14T12:05:27.985Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/cc/7f4c895d0cb98e47b6a85a6d79eaca03eb266129eed2f845125c09cf31ff/pyproj-3.7.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:5aff3343038d7426aa5076f07feb88065f50e0502d1b0d7c22ddfdd2c75a3f81", size = 9688868, upload-time = "2025-08-14T12:05:30.425Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/b7/c7e306b8bb0f071d9825b753ee4920f066c40fbfcce9372c4f3cfb2fc4ed/pyproj-3.7.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b0552178c61f2ac1c820d087e8ba6e62b29442debddbb09d51c4bf8acc84d888", size = 11045910, upload-time = "2025-08-14T12:05:32.507Z" },
+ { url = "https://files.pythonhosted.org/packages/42/fb/538a4d2df695980e2dde5c04d965fbdd1fe8c20a3194dc4aaa3952a4d1be/pyproj-3.7.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:47d87db2d2c436c5fd0409b34d70bb6cdb875cca2ebe7a9d1c442367b0ab8d59", size = 10895724, upload-time = "2025-08-14T12:05:35.465Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/8b/a3f0618b03957de9db5489a04558a8826f43906628bb0b766033aa3b5548/pyproj-3.7.2-cp314-cp314t-win32.whl", hash = "sha256:c9b6f1d8ad3e80a0ee0903a778b6ece7dca1d1d40f6d114ae01bc8ddbad971aa", size = 6056848, upload-time = "2025-08-14T12:05:37.553Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/56/413240dd5149dd3291eda55aa55a659da4431244a2fd1319d0ae89407cfb/pyproj-3.7.2-cp314-cp314t-win_amd64.whl", hash = "sha256:1914e29e27933ba6f9822663ee0600f169014a2859f851c054c88cf5ea8a333c", size = 6517676, upload-time = "2025-08-14T12:05:39.126Z" },
+ { url = "https://files.pythonhosted.org/packages/15/73/a7141a1a0559bf1a7aa42a11c879ceb19f02f5c6c371c6d57fd86cefd4d1/pyproj-3.7.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d9d25bae416a24397e0d85739f84d323b55f6511e45a522dd7d7eae70d10c7e4", size = 6391844, upload-time = "2025-08-14T12:05:40.745Z" },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
+]
+
+[[package]]
+name = "pytest-cov"
+version = "7.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "coverage", extra = ["toml"] },
+ { name = "pluggy" },
+ { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+]
+
+[[package]]
+name = "pytokens"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/8d/a762be14dae1c3bf280202ba3172020b2b0b4c537f94427435f19c413b72/pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a", size = 17644, upload-time = "2025-11-05T13:36:35.34Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/84/25/d9db8be44e205a124f6c98bc0324b2bb149b7431c53877fc6d1038dddaf5/pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3", size = 12195, upload-time = "2025-11-05T13:36:33.183Z" },
+]
+
+[[package]]
+name = "pytz"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
+ { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
+ { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
+ { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
+ { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
+ { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
+ { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
+ { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
+ { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
+ { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
+]
+
+[[package]]
+name = "pyzmq"
+version = "27.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi", marker = "implementation_name == 'pypy'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" },
+ { url = "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" },
+ { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" },
+ { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" },
+ { url = "https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = "2025-09-08T23:08:15.163Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" },
+ { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" },
+ { url = "https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436, upload-time = "2025-09-08T23:08:20.801Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301, upload-time = "2025-09-08T23:08:22.47Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197, upload-time = "2025-09-08T23:08:24.286Z" },
+ { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275, upload-time = "2025-09-08T23:08:26.063Z" },
+ { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469, upload-time = "2025-09-08T23:08:27.623Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961, upload-time = "2025-09-08T23:08:29.672Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282, upload-time = "2025-09-08T23:08:31.349Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468, upload-time = "2025-09-08T23:08:33.543Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394, upload-time = "2025-09-08T23:08:35.51Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" },
+ { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" },
+ { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" },
+ { url = "https://files.pythonhosted.org/packages/87/45/19efbb3000956e82d0331bafca5d9ac19ea2857722fa2caacefb6042f39d/pyzmq-27.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a", size = 1341197, upload-time = "2025-09-08T23:08:44.973Z" },
+ { url = "https://files.pythonhosted.org/packages/48/43/d72ccdbf0d73d1343936296665826350cb1e825f92f2db9db3e61c2162a2/pyzmq-27.1.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea", size = 897175, upload-time = "2025-09-08T23:08:46.601Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/2e/a483f73a10b65a9ef0161e817321d39a770b2acf8bcf3004a28d90d14a94/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96", size = 660427, upload-time = "2025-09-08T23:08:48.187Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/d2/5f36552c2d3e5685abe60dfa56f91169f7a2d99bbaf67c5271022ab40863/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d", size = 847929, upload-time = "2025-09-08T23:08:49.76Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/2a/404b331f2b7bf3198e9945f75c4c521f0c6a3a23b51f7a4a401b94a13833/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146", size = 1650193, upload-time = "2025-09-08T23:08:51.7Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/0b/f4107e33f62a5acf60e3ded67ed33d79b4ce18de432625ce2fc5093d6388/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd", size = 2024388, upload-time = "2025-09-08T23:08:53.393Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/01/add31fe76512642fd6e40e3a3bd21f4b47e242c8ba33efb6809e37076d9b/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a", size = 1885316, upload-time = "2025-09-08T23:08:55.702Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/59/a5f38970f9bf07cee96128de79590bb354917914a9be11272cfc7ff26af0/pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92", size = 587472, upload-time = "2025-09-08T23:08:58.18Z" },
+ { url = "https://files.pythonhosted.org/packages/70/d8/78b1bad170f93fcf5e3536e70e8fadac55030002275c9a29e8f5719185de/pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0", size = 661401, upload-time = "2025-09-08T23:08:59.802Z" },
+ { url = "https://files.pythonhosted.org/packages/81/d6/4bfbb40c9a0b42fc53c7cf442f6385db70b40f74a783130c5d0a5aa62228/pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7", size = 575170, upload-time = "2025-09-08T23:09:01.418Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" },
+ { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" },
+ { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" },
+]
+
+[[package]]
+name = "recommonmark"
+version = "0.7.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "commonmark" },
+ { name = "docutils" },
+ { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/1c/00/3dd2bdc4184b0ce754b5b446325abf45c2e0a347e022292ddc44670f628c/recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67", size = 34444, upload-time = "2020-12-17T19:24:56.523Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c6/77/ed589c75db5d02a77a1d5d2d9abc63f29676467d396c64277f98b50b79c2/recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f", size = 10214, upload-time = "2020-12-17T19:24:55.137Z" },
+]
+
+[[package]]
+name = "requests"
+version = "2.32.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "certifi" },
+ { name = "charset-normalizer" },
+ { name = "idna" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
+]
+
+[[package]]
+name = "roman-numerals-py"
+version = "3.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" },
+]
+
+[[package]]
+name = "rustworkx"
+version = "0.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e7/b0/66d96f02120f79eeed86b5c5be04029b6821155f31ed4907a4e9f1460671/rustworkx-0.17.1.tar.gz", hash = "sha256:59ea01b4e603daffa4e8827316c1641eef18ae9032f0b1b14aa0181687e3108e", size = 399407, upload-time = "2025-09-15T16:29:46.429Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/24/8972ed631fa05fdec05a7bb7f1fc0f8e78ee761ab37e8a93d1ed396ba060/rustworkx-0.17.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c08fb8db041db052da404839b064ebfb47dcce04ba9a3e2eb79d0c65ab011da4", size = 2257491, upload-time = "2025-08-13T01:43:31.466Z" },
+ { url = "https://files.pythonhosted.org/packages/23/ae/7b6bbae5e0487ee42072dc6a46edf5db9731a0701ed648db22121fb7490c/rustworkx-0.17.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:4ef8e327dadf6500edd76fedb83f6d888b9266c58bcdbffd5a40c33835c9dd26", size = 2040175, upload-time = "2025-08-13T01:43:33.762Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/ea/c17fb9428c8f0dcc605596f9561627a5b9ef629d356204ee5088cfcf52c6/rustworkx-0.17.1-cp39-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b809e0aa2927c68574b196f993233e269980918101b0dd235289c4f3ddb2115", size = 2324771, upload-time = "2025-08-13T01:43:35.553Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/40/ec8b3b8b0f8c0b768690c454b8dcc2781b4f2c767f9f1215539c7909e35b/rustworkx-0.17.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e82c46a92fb0fd478b7372e15ca524c287485fdecaed37b8bb68f4df2720f2", size = 2068584, upload-time = "2025-08-13T01:43:37.261Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/22/713b900d320d06ce8677e71bba0ec5df0037f1d83270bff5db3b271c10d7/rustworkx-0.17.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42170075d8a7319e89ff63062c2f1d1116ced37b6f044f3bf36d10b60a107aa4", size = 2380949, upload-time = "2025-08-13T01:52:17.435Z" },
+ { url = "https://files.pythonhosted.org/packages/20/4b/54be84b3b41a19caf0718a2b6bb280dde98c8626c809c969f16aad17458f/rustworkx-0.17.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65cba97fa95470239e2d65eb4db1613f78e4396af9f790ff771b0e5476bfd887", size = 2562069, upload-time = "2025-08-13T02:09:27.222Z" },
+ { url = "https://files.pythonhosted.org/packages/39/5b/281bb21d091ab4e36cf377088366d55d0875fa2347b3189c580ec62b44c7/rustworkx-0.17.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246cc252053f89e36209535b9c58755960197e6ae08d48d3973760141c62ac95", size = 2221186, upload-time = "2025-08-13T01:43:38.598Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/2d/30a941a21b81e9db50c4c3ef8a64c5ee1c8eea3a90506ca0326ce39d021f/rustworkx-0.17.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c10d25e9f0e87d6a273d1ea390b636b4fb3fede2094bf0cb3fe565d696a91b48", size = 2123510, upload-time = "2025-08-13T01:43:40.288Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/ef/c9199e4b6336ee5a9f1979c11b5779c5cf9ab6f8386e0b9a96c8ffba7009/rustworkx-0.17.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:48784a673cf8d04f3cd246fa6b53fd1ccc4d83304503463bd561c153517bccc1", size = 2302783, upload-time = "2025-08-13T01:43:42.073Z" },
+ { url = "https://files.pythonhosted.org/packages/30/3d/a49ab633e99fca4ccbb9c9f4bd41904186c175ebc25c530435529f71c480/rustworkx-0.17.1-cp39-abi3-win32.whl", hash = "sha256:5dbc567833ff0a8ad4580a4fe4bde92c186d36b4c45fca755fb1792e4fafe9b5", size = 1931541, upload-time = "2025-08-13T01:43:43.415Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/ec/cee878c1879b91ab8dc7d564535d011307839a2fea79d2a650413edf53be/rustworkx-0.17.1-cp39-abi3-win_amd64.whl", hash = "sha256:d0a48fb62adabd549f9f02927c3a159b51bf654c7388a12fc16d45452d5703ea", size = 2055049, upload-time = "2025-08-13T01:43:44.926Z" },
+]
+
+[[package]]
+name = "scipy"
+version = "1.16.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9b/5f/6f37d7439de1455ce9c5a556b8d1db0979f03a796c030bafdf08d35b7bf9/scipy-1.16.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:40be6cf99e68b6c4321e9f8782e7d5ff8265af28ef2cd56e9c9b2638fa08ad97", size = 36630881, upload-time = "2025-10-28T17:31:47.104Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/89/d70e9f628749b7e4db2aa4cd89735502ff3f08f7b9b27d2e799485987cd9/scipy-1.16.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:8be1ca9170fcb6223cc7c27f4305d680ded114a1567c0bd2bfcbf947d1b17511", size = 28941012, upload-time = "2025-10-28T17:31:53.411Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/a8/0e7a9a6872a923505dbdf6bb93451edcac120363131c19013044a1e7cb0c/scipy-1.16.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bea0a62734d20d67608660f69dcda23e7f90fb4ca20974ab80b6ed40df87a005", size = 20931935, upload-time = "2025-10-28T17:31:57.361Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/c7/020fb72bd79ad798e4dbe53938543ecb96b3a9ac3fe274b7189e23e27353/scipy-1.16.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:2a207a6ce9c24f1951241f4693ede2d393f59c07abc159b2cb2be980820e01fb", size = 23534466, upload-time = "2025-10-28T17:32:01.875Z" },
+ { url = "https://files.pythonhosted.org/packages/be/a0/668c4609ce6dbf2f948e167836ccaf897f95fb63fa231c87da7558a374cd/scipy-1.16.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:532fb5ad6a87e9e9cd9c959b106b73145a03f04c7d57ea3e6f6bb60b86ab0876", size = 33593618, upload-time = "2025-10-28T17:32:06.902Z" },
+ { url = "https://files.pythonhosted.org/packages/ca/6e/8942461cf2636cdae083e3eb72622a7fbbfa5cf559c7d13ab250a5dbdc01/scipy-1.16.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0151a0749efeaaab78711c78422d413c583b8cdd2011a3c1d6c794938ee9fdb2", size = 35899798, upload-time = "2025-10-28T17:32:12.665Z" },
+ { url = "https://files.pythonhosted.org/packages/79/e8/d0f33590364cdbd67f28ce79368b373889faa4ee959588beddf6daef9abe/scipy-1.16.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7180967113560cca57418a7bc719e30366b47959dd845a93206fbed693c867e", size = 36226154, upload-time = "2025-10-28T17:32:17.961Z" },
+ { url = "https://files.pythonhosted.org/packages/39/c1/1903de608c0c924a1749c590064e65810f8046e437aba6be365abc4f7557/scipy-1.16.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:deb3841c925eeddb6afc1e4e4a45e418d19ec7b87c5df177695224078e8ec733", size = 38878540, upload-time = "2025-10-28T17:32:23.907Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/d0/22ec7036ba0b0a35bccb7f25ab407382ed34af0b111475eb301c16f8a2e5/scipy-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:53c3844d527213631e886621df5695d35e4f6a75f620dca412bcd292f6b87d78", size = 38722107, upload-time = "2025-10-28T17:32:29.921Z" },
+ { url = "https://files.pythonhosted.org/packages/7b/60/8a00e5a524bb3bf8898db1650d350f50e6cffb9d7a491c561dc9826c7515/scipy-1.16.3-cp311-cp311-win_arm64.whl", hash = "sha256:9452781bd879b14b6f055b26643703551320aa8d79ae064a71df55c00286a184", size = 25506272, upload-time = "2025-10-28T17:32:34.577Z" },
+ { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" },
+ { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" },
+ { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" },
+ { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" },
+ { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" },
+ { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" },
+ { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" },
+ { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" },
+ { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" },
+ { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" },
+ { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" },
+ { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" },
+ { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" },
+ { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" },
+ { url = "https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" },
+ { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" },
+ { url = "https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469, upload-time = "2025-10-28T17:36:08.741Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043, upload-time = "2025-10-28T17:36:16.599Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952, upload-time = "2025-10-28T17:36:22.966Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512, upload-time = "2025-10-28T17:36:29.731Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639, upload-time = "2025-10-28T17:36:37.982Z" },
+ { url = "https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729, upload-time = "2025-10-28T17:36:46.547Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251, upload-time = "2025-10-28T17:36:55.161Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681, upload-time = "2025-10-28T17:37:04.1Z" },
+ { url = "https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423, upload-time = "2025-10-28T17:38:20.005Z" },
+ { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027, upload-time = "2025-10-28T17:38:24.966Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379, upload-time = "2025-10-28T17:37:14.061Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052, upload-time = "2025-10-28T17:37:21.709Z" },
+ { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183, upload-time = "2025-10-28T17:37:29.559Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174, upload-time = "2025-10-28T17:37:36.306Z" },
+ { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852, upload-time = "2025-10-28T17:37:42.228Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595, upload-time = "2025-10-28T17:37:48.102Z" },
+ { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269, upload-time = "2025-10-28T17:37:53.72Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779, upload-time = "2025-10-28T17:37:59.393Z" },
+ { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128, upload-time = "2025-10-28T17:38:05.259Z" },
+ { url = "https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127, upload-time = "2025-10-28T17:38:11.34Z" },
+]
+
+[[package]]
+name = "shapely"
+version = "2.1.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "numpy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" },
+ { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" },
+ { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" },
+ { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" },
+ { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" },
+ { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" },
+ { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" },
+ { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" },
+ { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" },
+ { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" },
+ { url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/90/98ef257c23c46425dc4d1d31005ad7c8d649fe423a38b917db02c30f1f5a/shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8", size = 1832644, upload-time = "2025-09-24T13:50:44.886Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/ab/0bee5a830d209adcd3a01f2d4b70e587cdd9fd7380d5198c064091005af8/shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a", size = 1642887, upload-time = "2025-09-24T13:50:46.735Z" },
+ { url = "https://files.pythonhosted.org/packages/2d/5e/7d7f54ba960c13302584c73704d8c4d15404a51024631adb60b126a4ae88/shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e", size = 2970931, upload-time = "2025-09-24T13:50:48.374Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/a2/83fc37e2a58090e3d2ff79175a95493c664bcd0b653dd75cb9134645a4e5/shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6", size = 3082855, upload-time = "2025-09-24T13:50:50.037Z" },
+ { url = "https://files.pythonhosted.org/packages/44/2b/578faf235a5b09f16b5f02833c53822294d7f21b242f8e2d0cf03fb64321/shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af", size = 3979960, upload-time = "2025-09-24T13:50:51.74Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/04/167f096386120f692cc4ca02f75a17b961858997a95e67a3cb6a7bbd6b53/shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd", size = 4142851, upload-time = "2025-09-24T13:50:53.49Z" },
+ { url = "https://files.pythonhosted.org/packages/48/74/fb402c5a6235d1c65a97348b48cdedb75fb19eca2b1d66d04969fc1c6091/shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350", size = 1541890, upload-time = "2025-09-24T13:50:55.337Z" },
+ { url = "https://files.pythonhosted.org/packages/41/47/3647fe7ad990af60ad98b889657a976042c9988c2807cf322a9d6685f462/shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715", size = 1722151, upload-time = "2025-09-24T13:50:57.153Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/49/63953754faa51ffe7d8189bfbe9ca34def29f8c0e34c67cbe2a2795f269d/shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40", size = 1834130, upload-time = "2025-09-24T13:50:58.49Z" },
+ { url = "https://files.pythonhosted.org/packages/7f/ee/dce001c1984052970ff60eb4727164892fb2d08052c575042a47f5a9e88f/shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b", size = 1642802, upload-time = "2025-09-24T13:50:59.871Z" },
+ { url = "https://files.pythonhosted.org/packages/da/e7/fc4e9a19929522877fa602f705706b96e78376afb7fad09cad5b9af1553c/shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801", size = 3018460, upload-time = "2025-09-24T13:51:02.08Z" },
+ { url = "https://files.pythonhosted.org/packages/a1/18/7519a25db21847b525696883ddc8e6a0ecaa36159ea88e0fef11466384d0/shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0", size = 3095223, upload-time = "2025-09-24T13:51:04.472Z" },
+ { url = "https://files.pythonhosted.org/packages/48/de/b59a620b1f3a129c3fecc2737104a0a7e04e79335bd3b0a1f1609744cf17/shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c", size = 4030760, upload-time = "2025-09-24T13:51:06.455Z" },
+ { url = "https://files.pythonhosted.org/packages/96/b3/c6655ee7232b417562bae192ae0d3ceaadb1cc0ffc2088a2ddf415456cc2/shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99", size = 4170078, upload-time = "2025-09-24T13:51:08.584Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/8e/605c76808d73503c9333af8f6cbe7e1354d2d238bda5f88eea36bfe0f42a/shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf", size = 1559178, upload-time = "2025-09-24T13:51:10.73Z" },
+ { url = "https://files.pythonhosted.org/packages/36/f7/d317eb232352a1f1444d11002d477e54514a4a6045536d49d0c59783c0da/shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c", size = 1739756, upload-time = "2025-09-24T13:51:12.105Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/c4/3ce4c2d9b6aabd27d26ec988f08cb877ba9e6e96086eff81bfea93e688c7/shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223", size = 1831290, upload-time = "2025-09-24T13:51:13.56Z" },
+ { url = "https://files.pythonhosted.org/packages/17/b9/f6ab8918fc15429f79cb04afa9f9913546212d7fb5e5196132a2af46676b/shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c", size = 1641463, upload-time = "2025-09-24T13:51:14.972Z" },
+ { url = "https://files.pythonhosted.org/packages/a5/57/91d59ae525ca641e7ac5551c04c9503aee6f29b92b392f31790fcb1a4358/shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df", size = 2970145, upload-time = "2025-09-24T13:51:16.961Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cb/4948be52ee1da6927831ab59e10d4c29baa2a714f599f1f0d1bc747f5777/shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf", size = 3073806, upload-time = "2025-09-24T13:51:18.712Z" },
+ { url = "https://files.pythonhosted.org/packages/03/83/f768a54af775eb41ef2e7bec8a0a0dbe7d2431c3e78c0a8bdba7ab17e446/shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4", size = 3980803, upload-time = "2025-09-24T13:51:20.37Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/cb/559c7c195807c91c79d38a1f6901384a2878a76fbdf3f1048893a9b7534d/shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc", size = 4133301, upload-time = "2025-09-24T13:51:21.887Z" },
+ { url = "https://files.pythonhosted.org/packages/80/cd/60d5ae203241c53ef3abd2ef27c6800e21afd6c94e39db5315ea0cbafb4a/shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566", size = 1583247, upload-time = "2025-09-24T13:51:23.401Z" },
+ { url = "https://files.pythonhosted.org/packages/74/d4/135684f342e909330e50d31d441ace06bf83c7dc0777e11043f99167b123/shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c", size = 1773019, upload-time = "2025-09-24T13:51:24.873Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/05/a44f3f9f695fa3ada22786dc9da33c933da1cbc4bfe876fe3a100bafe263/shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a", size = 1834137, upload-time = "2025-09-24T13:51:26.665Z" },
+ { url = "https://files.pythonhosted.org/packages/52/7e/4d57db45bf314573427b0a70dfca15d912d108e6023f623947fa69f39b72/shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076", size = 1642884, upload-time = "2025-09-24T13:51:28.029Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/27/4e29c0a55d6d14ad7422bf86995d7ff3f54af0eba59617eb95caf84b9680/shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1", size = 3018320, upload-time = "2025-09-24T13:51:29.903Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/bb/992e6a3c463f4d29d4cd6ab8963b75b1b1040199edbd72beada4af46bde5/shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0", size = 3094931, upload-time = "2025-09-24T13:51:32.699Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/16/82e65e21070e473f0ed6451224ed9fa0be85033d17e0c6e7213a12f59d12/shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26", size = 4030406, upload-time = "2025-09-24T13:51:34.189Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/75/c24ed871c576d7e2b64b04b1fe3d075157f6eb54e59670d3f5ffb36e25c7/shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0", size = 4169511, upload-time = "2025-09-24T13:51:36.297Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/f7/b3d1d6d18ebf55236eec1c681ce5e665742aab3c0b7b232720a7d43df7b6/shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735", size = 1602607, upload-time = "2025-09-24T13:51:37.757Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/f6/f09272a71976dfc138129b8faf435d064a811ae2f708cb147dccdf7aacdb/shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9", size = 1796682, upload-time = "2025-09-24T13:51:39.233Z" },
+]
+
+[[package]]
+name = "six"
+version = "1.17.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "3.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" },
+]
+
+[[package]]
+name = "sphinx"
+version = "8.2.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "alabaster" },
+ { name = "babel" },
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "docutils" },
+ { name = "imagesize" },
+ { name = "jinja2" },
+ { name = "packaging" },
+ { name = "pygments" },
+ { name = "requests" },
+ { name = "roman-numerals-py" },
+ { name = "snowballstemmer" },
+ { name = "sphinxcontrib-applehelp" },
+ { name = "sphinxcontrib-devhelp" },
+ { name = "sphinxcontrib-htmlhelp" },
+ { name = "sphinxcontrib-jsmath" },
+ { name = "sphinxcontrib-qthelp" },
+ { name = "sphinxcontrib-serializinghtml" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" },
+]
+
+[[package]]
+name = "sphinx-copybutton"
+version = "0.5.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/2b/a964715e7f5295f77509e59309959f4125122d648f86b4fe7d70ca1d882c/sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd", size = 23039, upload-time = "2023-04-14T08:10:22.998Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/48/1ea60e74949eecb12cdd6ac43987f9fd331156388dcc2319b45e2ebb81bf/sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e", size = 13343, upload-time = "2023-04-14T08:10:20.844Z" },
+]
+
+[[package]]
+name = "sphinx-rtd-theme"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "docutils" },
+ { name = "sphinx" },
+ { name = "sphinxcontrib-jquery" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-jquery"
+version = "4.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "sphinx" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" },
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" },
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "asttokens" },
+ { name = "executing" },
+ { name = "pure-eval" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
+]
+
+[[package]]
+name = "tomli"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" },
+ { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" },
+ { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" },
+ { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" },
+ { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" },
+ { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" },
+ { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" },
+ { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" },
+ { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" },
+ { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" },
+ { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" },
+ { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" },
+ { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" },
+ { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" },
+ { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" },
+ { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" },
+ { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" },
+ { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" },
+ { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" },
+ { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" },
+ { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" },
+ { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" },
+ { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" },
+ { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" },
+ { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" },
+ { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
+]
+
+[[package]]
+name = "tornado"
+version = "6.5.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" },
+ { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" },
+ { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" },
+ { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" },
+]
+
+[[package]]
+name = "tqdm"
+version = "4.67.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
+]
+
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
+]
+
+[[package]]
+name = "tzdata"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
+]
+
+[[package]]
+name = "urllib3"
+version = "2.6.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/5e/1d/0f3a93cca1ac5e8287842ed4eebbd0f7a991315089b1a0b01c7788aa7b63/urllib3-2.6.1.tar.gz", hash = "sha256:5379eb6e1aba4088bae84f8242960017ec8d8e3decf30480b3a1abdaa9671a3f", size = 432678, upload-time = "2025-12-08T15:25:26.773Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/56/190ceb8cb10511b730b564fb1e0293fa468363dbad26145c34928a60cb0c/urllib3-2.6.1-py3-none-any.whl", hash = "sha256:e67d06fe947c36a7ca39f4994b08d73922d40e6cca949907be05efa6fd75110b", size = 131138, upload-time = "2025-12-08T15:25:25.51Z" },
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.35.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "distlib" },
+ { name = "filelock" },
+ { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846bd9df527390ecc26b3805a0c5989048c210e22c5ca9/virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c", size = 6028799, upload-time = "2025-10-29T06:57:40.511Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, upload-time = "2025-10-29T06:57:37.598Z" },
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.14"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
+]
+
+[[package]]
+name = "widgetsnbextension"
+version = "4.0.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/f4/c67440c7fb409a71b7404b7aefcd7569a9c0d6bd071299bf4198ae7a5d95/widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9", size = 1097402, upload-time = "2025-11-01T21:15:55.178Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3f/0e/fa3b193432cfc60c93b42f3be03365f5f909d2b3ea410295cf36df739e31/widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366", size = 2196503, upload-time = "2025-11-01T21:15:53.565Z" },
+]
diff --git a/versioneer.py b/versioneer.py
deleted file mode 100644
index 1e3753e6..00000000
--- a/versioneer.py
+++ /dev/null
@@ -1,2277 +0,0 @@
-
-# Version: 0.29
-
-"""The Versioneer - like a rocketeer, but for versions.
-
-The Versioneer
-==============
-
-* like a rocketeer, but for versions!
-* https://github.com/python-versioneer/python-versioneer
-* Brian Warner
-* License: Public Domain (Unlicense)
-* Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3
-* [![Latest Version][pypi-image]][pypi-url]
-* [![Build Status][travis-image]][travis-url]
-
-This is a tool for managing a recorded version number in setuptools-based
-python projects. The goal is to remove the tedious and error-prone "update
-the embedded version string" step from your release process. Making a new
-release should be as easy as recording a new tag in your version-control
-system, and maybe making new tarballs.
-
-
-## Quick Install
-
-Versioneer provides two installation modes. The "classic" vendored mode installs
-a copy of versioneer into your repository. The experimental build-time dependency mode
-is intended to allow you to skip this step and simplify the process of upgrading.
-
-### Vendored mode
-
-* `pip install versioneer` to somewhere in your $PATH
- * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
- available, so you can also use `conda install -c conda-forge versioneer`
-* add a `[tool.versioneer]` section to your `pyproject.toml` or a
- `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
- * Note that you will need to add `tomli; python_version < "3.11"` to your
- build-time dependencies if you use `pyproject.toml`
-* run `versioneer install --vendor` in your source tree, commit the results
-* verify version information with `python setup.py version`
-
-### Build-time dependency mode
-
-* `pip install versioneer` to somewhere in your $PATH
- * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
- available, so you can also use `conda install -c conda-forge versioneer`
-* add a `[tool.versioneer]` section to your `pyproject.toml` or a
- `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
-* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`)
- to the `requires` key of the `build-system` table in `pyproject.toml`:
- ```toml
- [build-system]
- requires = ["setuptools", "versioneer[toml]"]
- build-backend = "setuptools.build_meta"
- ```
-* run `versioneer install --no-vendor` in your source tree, commit the results
-* verify version information with `python setup.py version`
-
-## Version Identifiers
-
-Source trees come from a variety of places:
-
-* a version-control system checkout (mostly used by developers)
-* a nightly tarball, produced by build automation
-* a snapshot tarball, produced by a web-based VCS browser, like GitHub's
- "tarball from tag" feature
-* a release tarball, produced by "setup.py sdist", distributed through PyPI
-
-Within each source tree, the version identifier (either a string or a number,
-this tool is format-agnostic) can come from a variety of places:
-
-* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
- about recent "tags" and an absolute revision-id
-* the name of the directory into which the tarball was unpacked
-* an expanded VCS keyword ($Id$, etc)
-* a `_version.py` created by some earlier build step
-
-For released software, the version identifier is closely related to a VCS
-tag. Some projects use tag names that include more than just the version
-string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
-needs to strip the tag prefix to extract the version identifier. For
-unreleased software (between tags), the version identifier should provide
-enough information to help developers recreate the same tree, while also
-giving them an idea of roughly how old the tree is (after version 1.2, before
-version 1.3). Many VCS systems can report a description that captures this,
-for example `git describe --tags --dirty --always` reports things like
-"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
-0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes).
-
-The version identifier is used for multiple purposes:
-
-* to allow the module to self-identify its version: `myproject.__version__`
-* to choose a name and prefix for a 'setup.py sdist' tarball
-
-## Theory of Operation
-
-Versioneer works by adding a special `_version.py` file into your source
-tree, where your `__init__.py` can import it. This `_version.py` knows how to
-dynamically ask the VCS tool for version information at import time.
-
-`_version.py` also contains `$Revision$` markers, and the installation
-process marks `_version.py` to have this marker rewritten with a tag name
-during the `git archive` command. As a result, generated tarballs will
-contain enough information to get the proper version.
-
-To allow `setup.py` to compute a version too, a `versioneer.py` is added to
-the top level of your source tree, next to `setup.py` and the `setup.cfg`
-that configures it. This overrides several distutils/setuptools commands to
-compute the version when invoked, and changes `setup.py build` and `setup.py
-sdist` to replace `_version.py` with a small static file that contains just
-the generated version data.
-
-## Installation
-
-See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
-
-## Version-String Flavors
-
-Code which uses Versioneer can learn about its version string at runtime by
-importing `_version` from your main `__init__.py` file and running the
-`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
-import the top-level `versioneer.py` and run `get_versions()`.
-
-Both functions return a dictionary with different flavors of version
-information:
-
-* `['version']`: A condensed version string, rendered using the selected
- style. This is the most commonly used value for the project's version
- string. The default "pep440" style yields strings like `0.11`,
- `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
- below for alternative styles.
-
-* `['full-revisionid']`: detailed revision identifier. For Git, this is the
- full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
-
-* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
- commit date in ISO 8601 format. This will be None if the date is not
- available.
-
-* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
- this is only accurate if run in a VCS checkout, otherwise it is likely to
-  be False or None.
-
-* `['error']`: if the version string could not be computed, this will be set
- to a string describing the problem, otherwise it will be None. It may be
- useful to throw an exception in setup.py if this is set, to avoid e.g.
- creating tarballs with a version string of "unknown".
-
-Some variants are more useful than others. Including `full-revisionid` in a
-bug report should allow developers to reconstruct the exact code being tested
-(or indicate the presence of local changes that should be shared with the
-developers). `version` is suitable for display in an "about" box or a CLI
-`--version` output: it can be easily compared against release notes and lists
-of bugs fixed in various releases.
-
-The installer adds the following text to your `__init__.py` to place a basic
-version in `YOURPROJECT.__version__`:
-
- from ._version import get_versions
- __version__ = get_versions()['version']
- del get_versions
-
-## Styles
-
-The setup.cfg `style=` configuration controls how the VCS information is
-rendered into a version string.
-
-The default style, "pep440", produces a PEP440-compliant string, equal to the
-un-prefixed tag name for actual releases, and containing an additional "local
-version" section with more detail for in-between builds. For Git, this is
-TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
-tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
-that this commit is two revisions ("+2") beyond the "0.11" tag. For released
-software (exactly equal to a known tag), the identifier will only contain the
-stripped tag, e.g. "0.11".
-
-Other styles are available. See [details.md](details.md) in the Versioneer
-source tree for descriptions.
-
-## Debugging
-
-Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
-to return a version of "0+unknown". To investigate the problem, run `setup.py
-version`, which will run the version-lookup code in a verbose mode, and will
-display the full contents of `get_versions()` (including the `error` string,
-which may help identify what went wrong).
-
-## Known Limitations
-
-Some situations are known to cause problems for Versioneer. This section
-details the most significant ones. More can be found on the GitHub
-[issues page](https://github.com/python-versioneer/python-versioneer/issues).
-
-### Subprojects
-
-Versioneer has limited support for source trees in which `setup.py` is not in
-the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
-two common reasons why `setup.py` might not be in the root:
-
-* Source trees which contain multiple subprojects, such as
- [Buildbot](https://github.com/buildbot/buildbot), which contains both
- "master" and "slave" subprojects, each with their own `setup.py`,
- `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
- distributions (and upload multiple independently-installable tarballs).
-* Source trees whose main purpose is to contain a C library, but which also
- provide bindings to Python (and perhaps other languages) in subdirectories.
-
-Versioneer will look for `.git` in parent directories, and most operations
-should get the right version string. However `pip` and `setuptools` have bugs
-and implementation details which frequently cause `pip install .` from a
-subproject directory to fail to find a correct version string (so it usually
-defaults to `0+unknown`).
-
-`pip install --editable .` should work correctly. `setup.py install` might
-work too.
-
-Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
-some later version.
-
-[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
-this issue. The discussion in
-[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
-issue from the Versioneer side in more detail.
-[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
-[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
-pip to let Versioneer work correctly.
-
-Versioneer-0.16 and earlier only looked for a `.git` directory next to the
-`setup.cfg`, so subprojects were completely unsupported with those releases.
-
-### Editable installs with setuptools <= 18.5
-
-`setup.py develop` and `pip install --editable .` allow you to install a
-project into a virtualenv once, then continue editing the source code (and
-test) without re-installing after every change.
-
-"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
-convenient way to specify executable scripts that should be installed along
-with the python package.
-
-These both work as expected when using modern setuptools. When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the install happens with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg` and `pyproject.toml`, if necessary,
- to include any new configuration settings indicated by the release notes.
- See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install --[no-]vendor` in your source tree, to replace
- `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to be easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-## Similar projects
-
-* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
- dependency
-* [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
- versioneer
-* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
- plugin
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the "Unlicense", as described in
-https://unlicense.org/.
-
-[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
-[pypi-url]: https://pypi.python.org/pypi/versioneer/
-[travis-image]:
-https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
-[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
-
-"""
-# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
-# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
-# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
-# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
-# pylint:disable=attribute-defined-outside-init,too-many-arguments
-
-import configparser
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-from pathlib import Path
-from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union
-from typing import NoReturn
-import functools
-
-have_tomllib = True
-if sys.version_info >= (3, 11):
- import tomllib
-else:
- try:
- import tomli as tomllib
- except ImportError:
- have_tomllib = False
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
- VCS: str
- style: str
- tag_prefix: str
- versionfile_source: str
- versionfile_build: Optional[str]
- parentdir_prefix: Optional[str]
- verbose: Optional[bool]
-
-
-def get_root() -> str:
- """Get the project root directory.
-
- We require that all commands are run from the project root, i.e. the
- directory that contains setup.py, setup.cfg, and versioneer.py .
- """
- root = os.path.realpath(os.path.abspath(os.getcwd()))
- setup_py = os.path.join(root, "setup.py")
- pyproject_toml = os.path.join(root, "pyproject.toml")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (
- os.path.exists(setup_py)
- or os.path.exists(pyproject_toml)
- or os.path.exists(versioneer_py)
- ):
- # allow 'python path/to/setup.py COMMAND'
- root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
- setup_py = os.path.join(root, "setup.py")
- pyproject_toml = os.path.join(root, "pyproject.toml")
- versioneer_py = os.path.join(root, "versioneer.py")
- if not (
- os.path.exists(setup_py)
- or os.path.exists(pyproject_toml)
- or os.path.exists(versioneer_py)
- ):
- err = ("Versioneer was unable to run the project root directory. "
- "Versioneer requires setup.py to be executed from "
- "its immediate directory (like 'python setup.py COMMAND'), "
- "or in a way that lets it use sys.argv[0] to find the root "
- "(like 'python path/to/setup.py COMMAND').")
- raise VersioneerBadRootError(err)
- try:
- # Certain runtime workflows (setup.py install/develop in a setuptools
- # tree) execute all dependencies in a single python process, so
- # "versioneer" may be imported multiple times, and python's shared
- # module-import table will cache the first one. So we can't use
- # os.path.dirname(__file__), as that will find whichever
- # versioneer.py was first imported, even in later projects.
- my_path = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(my_path)[0])
- vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
- if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
- print("Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(my_path), versioneer_py))
- except NameError:
- pass
- return root
-
-
-def get_config_from_root(root: str) -> VersioneerConfig:
- """Read the project setup.cfg file to determine Versioneer config."""
- # This might raise OSError (if setup.cfg is missing), or
- # configparser.NoSectionError (if it lacks a [versioneer] section), or
- # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
- # the top of versioneer.py for instructions on writing your setup.cfg .
- root_pth = Path(root)
- pyproject_toml = root_pth / "pyproject.toml"
- setup_cfg = root_pth / "setup.cfg"
- section: Union[Dict[str, Any], configparser.SectionProxy, None] = None
- if pyproject_toml.exists() and have_tomllib:
- try:
- with open(pyproject_toml, 'rb') as fobj:
- pp = tomllib.load(fobj)
- section = pp['tool']['versioneer']
- except (tomllib.TOMLDecodeError, KeyError) as e:
- print(f"Failed to load config from {pyproject_toml}: {e}")
- print("Try to load it from setup.cfg")
- if not section:
- parser = configparser.ConfigParser()
- with open(setup_cfg) as cfg_file:
- parser.read_file(cfg_file)
- parser.get("versioneer", "VCS") # raise error if missing
-
- section = parser["versioneer"]
-
-    # `cast` really shouldn't be used, but it's simplest for the
- # common VersioneerConfig users at the moment. We verify against
- # `None` values elsewhere where it matters
-
- cfg = VersioneerConfig()
- cfg.VCS = section['VCS']
- cfg.style = section.get("style", "")
- cfg.versionfile_source = cast(str, section.get("versionfile_source"))
- cfg.versionfile_build = section.get("versionfile_build")
- cfg.tag_prefix = cast(str, section.get("tag_prefix"))
- if cfg.tag_prefix in ("''", '""', None):
- cfg.tag_prefix = ""
- cfg.parentdir_prefix = section.get("parentdir_prefix")
- if isinstance(section, configparser.SectionProxy):
- # Make sure configparser translates to bool
- cfg.verbose = section.getboolean("verbose")
- else:
- cfg.verbose = section.get("verbose")
-
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
-
-
-def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
- """Create decorator to mark a method as the handler of a VCS."""
- def decorate(f: Callable) -> Callable:
- """Store f in HANDLERS[vcs][method]."""
- HANDLERS.setdefault(vcs, {})[method] = f
- return f
- return decorate
-
-
-def run_command(
- commands: List[str],
- args: List[str],
- cwd: Optional[str] = None,
- verbose: bool = False,
- hide_stderr: bool = False,
- env: Optional[Dict[str, str]] = None,
-) -> Tuple[Optional[str], Optional[int]]:
- """Call the given command(s)."""
- assert isinstance(commands, list)
- process = None
-
- popen_kwargs: Dict[str, Any] = {}
- if sys.platform == "win32":
- # This hides the console window if pythonw.exe is used
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- popen_kwargs["startupinfo"] = startupinfo
-
- for command in commands:
- try:
- dispcmd = str([command] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- process = subprocess.Popen([command] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None), **popen_kwargs)
- break
- except OSError as e:
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %s" % dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %s" % (commands,))
- return None, None
- stdout = process.communicate()[0].strip().decode()
- if process.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % dispcmd)
- print("stdout was %s" % stdout)
- return None, process.returncode
- return stdout, process.returncode
-
-
-LONG_VERSION_PY['git'] = r'''
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by GitHub's download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain.
-# Generated by versioneer-0.29
-# https://github.com/python-versioneer/python-versioneer
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-from typing import Any, Callable, Dict, List, Optional, Tuple
-import functools
-
-
-def get_keywords() -> Dict[str, str]:
- """Get the keywords needed to look up the version information."""
- # these strings will be replaced by git during git-archive.
- # setup.py/versioneer.py will grep for the variable names, so they must
- # each be defined on a line of their own. _version.py will just call
- # get_keywords().
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
- git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
- git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
- keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
- return keywords
-
-
-class VersioneerConfig:
- """Container for Versioneer configuration parameters."""
-
- VCS: str
- style: str
- tag_prefix: str
- parentdir_prefix: str
- versionfile_source: str
- verbose: bool
-
-
-def get_config() -> VersioneerConfig:
- """Create, populate and return the VersioneerConfig() object."""
- # these strings are filled in when 'setup.py versioneer' creates
- # _version.py
- cfg = VersioneerConfig()
- cfg.VCS = "git"
- cfg.style = "%(STYLE)s"
- cfg.tag_prefix = "%(TAG_PREFIX)s"
- cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
- cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
- cfg.verbose = False
- return cfg
-
-
-class NotThisMethod(Exception):
- """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY: Dict[str, str] = {}
-HANDLERS: Dict[str, Dict[str, Callable]] = {}
-
-
-def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
- """Create decorator to mark a method as the handler of a VCS."""
- def decorate(f: Callable) -> Callable:
- """Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
- return f
- return decorate
-
-
-def run_command(
- commands: List[str],
- args: List[str],
- cwd: Optional[str] = None,
- verbose: bool = False,
- hide_stderr: bool = False,
- env: Optional[Dict[str, str]] = None,
-) -> Tuple[Optional[str], Optional[int]]:
- """Call the given command(s)."""
- assert isinstance(commands, list)
- process = None
-
- popen_kwargs: Dict[str, Any] = {}
- if sys.platform == "win32":
- # This hides the console window if pythonw.exe is used
- startupinfo = subprocess.STARTUPINFO()
- startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
- popen_kwargs["startupinfo"] = startupinfo
-
- for command in commands:
- try:
- dispcmd = str([command] + args)
- # remember shell=False, so use git.cmd on windows, not just git
- process = subprocess.Popen([command] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None), **popen_kwargs)
- break
- except OSError as e:
- if e.errno == errno.ENOENT:
- continue
- if verbose:
- print("unable to run %%s" %% dispcmd)
- print(e)
- return None, None
- else:
- if verbose:
- print("unable to find command, tried %%s" %% (commands,))
- return None, None
- stdout = process.communicate()[0].strip().decode()
- if process.returncode != 0:
- if verbose:
- print("unable to run %%s (error)" %% dispcmd)
- print("stdout was %%s" %% stdout)
- return None, process.returncode
- return stdout, process.returncode
-
-
-def versions_from_parentdir(
- parentdir_prefix: str,
- root: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory.
- """
- rootdirs = []
-
- for _ in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print("Tried directories %%s but none started with prefix %%s" %%
- (str(rootdirs), parentdir_prefix))
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords: Dict[str, str] = {}
- try:
- with open(versionfile_abs, "r") as fobj:
- for line in fobj:
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- except OSError:
- pass
- return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(
- keywords: Dict[str, str],
- tag_prefix: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Get version information from git keywords."""
- if "refnames" not in keywords:
- raise NotThisMethod("Short version file found")
- date = keywords.get("date")
- if date is not None:
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
-
- # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = {r.strip() for r in refnames.strip("()").split(",")}
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %%d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = {r for r in refs if re.search(r'\d', r)}
- if verbose:
- print("discarding '%%s', no digits" %% ",".join(refs - tags))
- if verbose:
- print("likely tags: %%s" %% ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- # Filter out refs that exactly match prefix or that don't start
- # with a number once the prefix is stripped (mostly a concern
- # when prefix is '')
- if not re.match(r'\d', r):
- continue
- if verbose:
- print("picking %%s" %% r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(
- tag_prefix: str,
- root: str,
- verbose: bool,
- runner: Callable = run_command
-) -> Dict[str, Any]:
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- # GIT_DIR can interfere with correct operation of Versioneer.
- # It may be intended to be passed to the Versioneer-versioned project,
- # but that should not change where we get our version from.
- env = os.environ.copy()
- env.pop("GIT_DIR", None)
- runner = functools.partial(runner, env=env)
-
- _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=not verbose)
- if rc != 0:
- if verbose:
- print("Directory %%s not under git control" %% root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = runner(GITS, [
- "describe", "--tags", "--dirty", "--always", "--long",
- "--match", f"{tag_prefix}[[:digit:]]*"
- ], cwd=root)
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces: Dict[str, Any] = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
- cwd=root)
- # --abbrev-ref was added in git-1.6.3
- if rc != 0 or branch_name is None:
- raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
- branch_name = branch_name.strip()
-
- if branch_name == "HEAD":
- # If we aren't exactly on a branch, pick a branch which represents
- # the current commit. If all else fails, we are on a branchless
- # commit.
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
- # --contains was added in git-1.5.4
- if rc != 0 or branches is None:
- raise NotThisMethod("'git branch --contains' returned error")
- branches = branches.split("\n")
-
- # Remove the first line if we're running detached
- if "(" in branches[0]:
- branches.pop(0)
-
- # Strip off the leading "* " from the list of branches.
- branches = [branch[2:] for branch in branches]
- if "master" in branches:
- branch_name = "master"
- elif not branches:
- branch_name = None
- else:
- # Pick the first branch that is returned. Good or bad.
- branch_name = branches[0]
-
- pieces["branch"] = branch_name
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
- if not mo:
- # unparsable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%%s'"
- %% describe_out)
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%%s' doesn't start with prefix '%%s'"
- print(fmt %% (full_tag, tag_prefix))
- pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
- %% (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
- pieces["distance"] = len(out.split()) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
-
-
-def plus_or_dot(pieces: Dict[str, Any]) -> str:
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces: Dict[str, Any]) -> str:
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_branch(pieces: Dict[str, Any]) -> str:
- """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
-
- The ".dev0" means not master branch. Note that .dev0 sorts backwards
- (a feature branch will appear "older" than the master branch).
-
- Exceptions:
- 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0"
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
- """Split pep440 version string at the post-release segment.
-
- Returns the release segments before the post-release and the
-    post-release version number (or None if no post-release segment is present).
- """
- vc = str.split(ver, ".post")
- return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
-
-
-def render_pep440_pre(pieces: Dict[str, Any]) -> str:
- """TAG[.postN.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post0.devDISTANCE
- """
- if pieces["closest-tag"]:
- if pieces["distance"]:
- # update the post release segment
- tag_version, post_version = pep440_split_post(pieces["closest-tag"])
- rendered = tag_version
- if post_version is not None:
- rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"])
- else:
- rendered += ".post0.dev%%d" %% (pieces["distance"])
- else:
- # no commits, use the tag as the version
- rendered = pieces["closest-tag"]
- else:
- # exception #1
- rendered = "0.post0.dev%%d" %% pieces["distance"]
- return rendered
-
-
-def render_pep440_post(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyway.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%%s" %% pieces["short"]
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%%s" %% pieces["short"]
- return rendered
-
-
-def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
-
- The ".dev0" means not master branch.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%%s" %% pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+g%%s" %% pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_old(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%%d" %% pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces: Dict[str, Any]) -> str:
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render_git_describe_long(pieces: Dict[str, Any]) -> str:
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always --long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-branch":
- rendered = render_pep440_branch(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-post-branch":
- rendered = render_pep440_post_branch(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%%s'" %% style)
-
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
-
-
-def get_versions() -> Dict[str, Any]:
- """Get version information or return default if unable to do so."""
- # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
- # __file__, we can work backwards from there to the root. Some
- # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
- # case we can only use expanded keywords.
-
- cfg = get_config()
- verbose = cfg.verbose
-
- try:
- return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
- verbose)
- except NotThisMethod:
- pass
-
- try:
- root = os.path.realpath(__file__)
- # versionfile_source is the relative path from the top of the source
- # tree (where the .git directory might live) to this file. Invert
- # this to find the root from __file__.
- for _ in cfg.versionfile_source.split('/'):
- root = os.path.dirname(root)
- except NameError:
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to find root of source tree",
- "date": None}
-
- try:
- pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
- return render(pieces, cfg.style)
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- except NotThisMethod:
- pass
-
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None,
- "error": "unable to compute version", "date": None}
-'''
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
- """Extract version information from the given file."""
- # the code embedded in _version.py can just fetch the value of these
- # keywords. When used from setup.py, we don't want to import _version.py,
- # so we do it with a regexp instead. This function is not used from
- # _version.py.
- keywords: Dict[str, str] = {}
- try:
- with open(versionfile_abs, "r") as fobj:
- for line in fobj:
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- except OSError:
- pass
- return keywords
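-
-# For example (illustrative, hypothetical file contents): a keyword-expanded
-# _version.py line such as
-#     git_refnames = " (HEAD -> master, tag: v1.2)"
-# yields keywords["refnames"] = " (HEAD -> master, tag: v1.2)".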
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(
- keywords: Dict[str, str],
- tag_prefix: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Get version information from git keywords."""
- if "refnames" not in keywords:
- raise NotThisMethod("Short version file found")
- date = keywords.get("date")
- if date is not None:
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
-
- # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
- # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
- # -like" string, which we must then edit to make compliant), because
- # it's been around since git-1.5.3, and it's too difficult to
- # discover which version we're using, or to work around using an
- # older one.
- date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
- refnames = keywords["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("keywords are unexpanded, not using")
- raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = {r.strip() for r in refnames.strip("()").split(",")}
- # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
- # just "foo-1.0". If we see a "tag: " prefix, prefer those.
- TAG = "tag: "
- tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
- if not tags:
- # Either we're using git < 1.8.3, or there really are no tags. We use
- # a heuristic: assume all version tags have a digit. The old git %d
- # expansion behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us distinguish
- # between branches and tags. By ignoring refnames without digits, we
- # filter out many common branch names like "release" and
- # "stabilization", as well as "HEAD" and "master".
- tags = {r for r in refs if re.search(r'\d', r)}
- if verbose:
- print("discarding '%s', no digits" % ",".join(refs - tags))
- if verbose:
- print("likely tags: %s" % ",".join(sorted(tags)))
- for ref in sorted(tags):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- # Filter out refs that exactly match prefix or that don't start
- # with a number once the prefix is stripped (mostly a concern
- # when prefix is '')
- if not re.match(r'\d', r):
- continue
- if verbose:
- print("picking %s" % r)
- return {"version": r,
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": None,
- "date": date}
- # no suitable tags, so version is "0+unknown", but full hex is still there
- if verbose:
- print("no suitable tags, using unknown + full revision id")
- return {"version": "0+unknown",
- "full-revisionid": keywords["full"].strip(),
- "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(
- tag_prefix: str,
- root: str,
- verbose: bool,
- runner: Callable = run_command
-) -> Dict[str, Any]:
- """Get version from 'git describe' in the root of the source tree.
-
- This only gets called if the git-archive 'subst' keywords were *not*
- expanded, and _version.py hasn't already been rewritten with a short
- version string, meaning we're inside a checked out source tree.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
-
- # GIT_DIR can interfere with correct operation of Versioneer.
- # It may be intended to be passed to the Versioneer-versioned project,
- # but that should not change where we get our version from.
- env = os.environ.copy()
- env.pop("GIT_DIR", None)
- runner = functools.partial(runner, env=env)
-
- _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=not verbose)
- if rc != 0:
- if verbose:
- print("Directory %s not under git control" % root)
- raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
- # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
- # if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = runner(GITS, [
- "describe", "--tags", "--dirty", "--always", "--long",
- "--match", f"{tag_prefix}[[:digit:]]*"
- ], cwd=root)
- # --long was added in git-1.5.5
- if describe_out is None:
- raise NotThisMethod("'git describe' failed")
- describe_out = describe_out.strip()
- full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
- if full_out is None:
- raise NotThisMethod("'git rev-parse' failed")
- full_out = full_out.strip()
-
- pieces: Dict[str, Any] = {}
- pieces["long"] = full_out
- pieces["short"] = full_out[:7] # maybe improved later
- pieces["error"] = None
-
- branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
- cwd=root)
- # --abbrev-ref was added in git-1.6.3
- if rc != 0 or branch_name is None:
- raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
- branch_name = branch_name.strip()
-
- if branch_name == "HEAD":
- # If we aren't exactly on a branch, pick a branch which represents
- # the current commit. If all else fails, we are on a branchless
- # commit.
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
- # --contains was added in git-1.5.4
- if rc != 0 or branches is None:
- raise NotThisMethod("'git branch --contains' returned error")
- branches = branches.split("\n")
-
- # Remove the first line if we're running detached
- if "(" in branches[0]:
- branches.pop(0)
-
- # Strip off the leading "* " from the list of branches.
- branches = [branch[2:] for branch in branches]
- if "master" in branches:
- branch_name = "master"
- elif not branches:
- branch_name = None
- else:
- # Pick the first branch that is returned. Good or bad.
- branch_name = branches[0]
-
- pieces["branch"] = branch_name
-
- # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
- # TAG might have hyphens.
- git_describe = describe_out
-
- # look for -dirty suffix
- dirty = git_describe.endswith("-dirty")
- pieces["dirty"] = dirty
- if dirty:
- git_describe = git_describe[:git_describe.rindex("-dirty")]
-
- # now we have TAG-NUM-gHEX or HEX
-
- if "-" in git_describe:
- # TAG-NUM-gHEX
- mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
- if not mo:
- # unparsable. Maybe git-describe is misbehaving?
- pieces["error"] = ("unable to parse git-describe output: '%s'"
- % describe_out)
- return pieces
-
- # tag
- full_tag = mo.group(1)
- if not full_tag.startswith(tag_prefix):
- if verbose:
- fmt = "tag '%s' doesn't start with prefix '%s'"
- print(fmt % (full_tag, tag_prefix))
- pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
- % (full_tag, tag_prefix))
- return pieces
- pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
- # distance: number of commits since tag
- pieces["distance"] = int(mo.group(2))
-
- # commit: short hex revision ID
- pieces["short"] = mo.group(3)
-
- else:
- # HEX: no tags
- pieces["closest-tag"] = None
- out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
- pieces["distance"] = len(out.split()) # total number of commits
-
- # commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
- # Use only the last line. Previous lines may contain GPG signature
- # information.
- date = date.splitlines()[-1]
- pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
- return pieces
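-
-# For example (illustrative values): a describe output of
-# "v1.2-3-gabc1234-dirty" with tag_prefix "v" parses into closest-tag "1.2",
-# distance 3, short "abc1234", and dirty True.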
-
-
-def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None:
- """Git-specific installation logic for Versioneer.
-
- For Git, this means creating/changing .gitattributes to mark _version.py
- for export-subst keyword substitution.
- """
- GITS = ["git"]
- if sys.platform == "win32":
- GITS = ["git.cmd", "git.exe"]
- files = [versionfile_source]
- if ipy:
- files.append(ipy)
- if "VERSIONEER_PEP518" not in globals():
- try:
- my_path = __file__
- if my_path.endswith((".pyc", ".pyo")):
- my_path = os.path.splitext(my_path)[0] + ".py"
- versioneer_file = os.path.relpath(my_path)
- except NameError:
- versioneer_file = "versioneer.py"
- files.append(versioneer_file)
- present = False
- try:
- with open(".gitattributes", "r") as fobj:
- for line in fobj:
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- break
- except OSError:
- pass
- if not present:
- with open(".gitattributes", "a+") as fobj:
- fobj.write(f"{versionfile_source} export-subst\n")
- files.append(".gitattributes")
- run_command(GITS, ["add", "--"] + files)
-
-
-def versions_from_parentdir(
- parentdir_prefix: str,
- root: str,
- verbose: bool,
-) -> Dict[str, Any]:
- """Try to determine the version from the parent directory name.
-
- Source tarballs conventionally unpack into a directory that includes both
- the project name and a version string. We will also support searching up
- two directory levels for an appropriately named parent directory
- """
- rootdirs = []
-
- for _ in range(3):
- dirname = os.path.basename(root)
- if dirname.startswith(parentdir_prefix):
- return {"version": dirname[len(parentdir_prefix):],
- "full-revisionid": None,
- "dirty": False, "error": None, "date": None}
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
-
- if verbose:
- print("Tried directories %s but none started with prefix %s" %
- (str(rootdirs), parentdir_prefix))
- raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.29) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-import json
-
-version_json = '''
-%s
-''' # END VERSION_JSON
-
-
-def get_versions():
- return json.loads(version_json)
-"""
-
-
-def versions_from_file(filename: str) -> Dict[str, Any]:
- """Try to determine the version from _version.py if present."""
- try:
- with open(filename) as f:
- contents = f.read()
- except OSError:
- raise NotThisMethod("unable to read _version.py")
- mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
- contents, re.M | re.S)
- if not mo:
- mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
- contents, re.M | re.S)
- if not mo:
- raise NotThisMethod("no version_json in _version.py")
- return json.loads(mo.group(1))
-
-
-def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None:
- """Write the given version number to the given _version.py file."""
- contents = json.dumps(versions, sort_keys=True,
- indent=1, separators=(",", ": "))
- with open(filename, "w") as f:
- f.write(SHORT_VERSION_PY % contents)
-
- print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def plus_or_dot(pieces: Dict[str, Any]) -> str:
- """Return a + if we don't already have one, else return a ."""
- if "+" in pieces.get("closest-tag", ""):
- return "."
- return "+"
-
-
-def render_pep440(pieces: Dict[str, Any]) -> str:
- """Build up version string, with post-release "local version identifier".
-
- Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
- get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
- Exceptions:
- 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0+untagged.%d.g%s" % (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
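-
-# For example (illustrative pieces): {"closest-tag": "1.2", "distance": 3,
-# "short": "abc1234", "dirty": True} renders as "1.2+3.gabc1234.dirty";
-# with no tag the same pieces render as "0+untagged.3.gabc1234.dirty".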
-
-
-def render_pep440_branch(pieces: Dict[str, Any]) -> str:
- """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
-
- The ".dev0" means not master branch. Note that .dev0 sorts backwards
- (a feature branch will appear "older" than the master branch).
-
- Exceptions:
- 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0"
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+untagged.%d.g%s" % (pieces["distance"],
- pieces["short"])
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
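-
-# For example (illustrative pieces): tag "1.2", distance 3, short "abc1234"
-# on branch "feature-x" renders as "1.2.dev0+3.gabc1234", which sorts before
-# the corresponding master build "1.2+3.gabc1234" under PEP 440.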
-
-
-def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
- """Split pep440 version string at the post-release segment.
-
- Returns the release segments before the post-release and the
- post-release version number (or None if no post-release segment is present).
- """
- vc = str.split(ver, ".post")
- return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
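-
-# For example: pep440_split_post("1.2.post4") == ("1.2", 4),
-# pep440_split_post("1.2") == ("1.2", None), and
-# pep440_split_post("1.2.post") == ("1.2", 0).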
-
-
-def render_pep440_pre(pieces: Dict[str, Any]) -> str:
- """TAG[.postN.devDISTANCE] -- No -dirty.
-
- Exceptions:
- 1: no tags. 0.post0.devDISTANCE
- """
- if pieces["closest-tag"]:
- if pieces["distance"]:
- # update the post release segment
- tag_version, post_version = pep440_split_post(pieces["closest-tag"])
- rendered = tag_version
- if post_version is not None:
- rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
- else:
- rendered += ".post0.dev%d" % (pieces["distance"])
- else:
- # no commits, use the tag as the version
- rendered = pieces["closest-tag"]
- else:
- # exception #1
- rendered = "0.post0.dev%d" % pieces["distance"]
- return rendered
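-
-# For example (illustrative pieces): tag "1.2" at distance 3 renders as
-# "1.2.post0.dev3", while tag "1.2.post4" at distance 3 renders as
-# "1.2.post5.dev3" (the existing post segment is bumped).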
-
-
-def render_pep440_post(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX] .
-
- The ".dev0" means dirty. Note that .dev0 sorts backwards
- (a dirty tree will appear "older" than the corresponding clean one),
- but you shouldn't be releasing software with -dirty anyway.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- return rendered
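-
-# For example (illustrative pieces): tag "1.2", distance 3, short "abc1234"
-# renders as "1.2.post3+gabc1234"; if the tree is also dirty, it renders as
-# "1.2.post3.dev0+gabc1234".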
-
-
-def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
-
- The ".dev0" means not master branch.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += plus_or_dot(pieces)
- rendered += "g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["branch"] != "master":
- rendered += ".dev0"
- rendered += "+g%s" % pieces["short"]
- if pieces["dirty"]:
- rendered += ".dirty"
- return rendered
-
-
-def render_pep440_old(pieces: Dict[str, Any]) -> str:
- """TAG[.postDISTANCE[.dev0]] .
-
- The ".dev0" means dirty.
-
- Exceptions:
- 1: no tags. 0.postDISTANCE[.dev0]
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"] or pieces["dirty"]:
- rendered += ".post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- else:
- # exception #1
- rendered = "0.post%d" % pieces["distance"]
- if pieces["dirty"]:
- rendered += ".dev0"
- return rendered
-
-
-def render_git_describe(pieces: Dict[str, Any]) -> str:
- """TAG[-DISTANCE-gHEX][-dirty].
-
- Like 'git describe --tags --dirty --always'.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- if pieces["distance"]:
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
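-
-# For example (illustrative pieces): tag "1.2", distance 3, short "abc1234",
-# dirty True renders as "1.2-3-gabc1234-dirty", matching plain
-# 'git describe --tags --dirty --always'.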
-
-
-def render_git_describe_long(pieces: Dict[str, Any]) -> str:
- """TAG-DISTANCE-gHEX[-dirty].
-
- Like 'git describe --tags --dirty --always --long'.
- The distance/hash is unconditional.
-
- Exceptions:
- 1: no tags. HEX[-dirty] (note: no 'g' prefix)
- """
- if pieces["closest-tag"]:
- rendered = pieces["closest-tag"]
- rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
- else:
- # exception #1
- rendered = pieces["short"]
- if pieces["dirty"]:
- rendered += "-dirty"
- return rendered
-
-
-def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
- """Render the given version pieces into the requested style."""
- if pieces["error"]:
- return {"version": "unknown",
- "full-revisionid": pieces.get("long"),
- "dirty": None,
- "error": pieces["error"],
- "date": None}
-
- if not style or style == "default":
- style = "pep440" # the default
-
- if style == "pep440":
- rendered = render_pep440(pieces)
- elif style == "pep440-branch":
- rendered = render_pep440_branch(pieces)
- elif style == "pep440-pre":
- rendered = render_pep440_pre(pieces)
- elif style == "pep440-post":
- rendered = render_pep440_post(pieces)
- elif style == "pep440-post-branch":
- rendered = render_pep440_post_branch(pieces)
- elif style == "pep440-old":
- rendered = render_pep440_old(pieces)
- elif style == "git-describe":
- rendered = render_git_describe(pieces)
- elif style == "git-describe-long":
- rendered = render_git_describe_long(pieces)
- else:
- raise ValueError("unknown style '%s'" % style)
-
- return {"version": rendered, "full-revisionid": pieces["long"],
- "dirty": pieces["dirty"], "error": None,
- "date": pieces.get("date")}
-
-
-class VersioneerBadRootError(Exception):
- """The project root directory is unknown or missing key files."""
-
-
-def get_versions(verbose: bool = False) -> Dict[str, Any]:
- """Get the project version from whatever source is available.
-
- Returns dict with two keys: 'version' and 'full'.
- """
- if "versioneer" in sys.modules:
- # see the discussion in cmdclass.py:get_cmdclass()
- del sys.modules["versioneer"]
-
- root = get_root()
- cfg = get_config_from_root(root)
-
- assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
- handlers = HANDLERS.get(cfg.VCS)
- assert handlers, "unrecognized VCS '%s'" % cfg.VCS
- verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None`
- assert cfg.versionfile_source is not None, \
- "please set versioneer.versionfile_source"
- assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
- versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
- # extract version from first of: _version.py, VCS command (e.g. 'git
- # describe'), parentdir. This is meant to work for developers using a
- # source checkout, for users of a tarball created by 'setup.py sdist',
- # and for users of a tarball/zipball created by 'git archive' or github's
- # download-from-tag feature or the equivalent in other VCSes.
-
- get_keywords_f = handlers.get("get_keywords")
- from_keywords_f = handlers.get("keywords")
- if get_keywords_f and from_keywords_f:
- try:
- keywords = get_keywords_f(versionfile_abs)
- ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
- if verbose:
- print("got version from expanded keyword %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- ver = versions_from_file(versionfile_abs)
- if verbose:
- print("got version from file %s %s" % (versionfile_abs, ver))
- return ver
- except NotThisMethod:
- pass
-
- from_vcs_f = handlers.get("pieces_from_vcs")
- if from_vcs_f:
- try:
- pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
- ver = render(pieces, cfg.style)
- if verbose:
- print("got version from VCS %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- try:
- if cfg.parentdir_prefix:
- ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
- if verbose:
- print("got version from parentdir %s" % ver)
- return ver
- except NotThisMethod:
- pass
-
- if verbose:
- print("unable to compute version")
-
- return {"version": "0+unknown", "full-revisionid": None,
- "dirty": None, "error": "unable to compute version",
- "date": None}
-
-
-def get_version() -> str:
- """Get the short version string for this project."""
- return get_versions()["version"]
-
-
-def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None):
- """Get the custom setuptools subclasses used by Versioneer.
-
- If the package uses a different cmdclass (e.g. one from numpy), it
- should be provided as an argument.
- """
- if "versioneer" in sys.modules:
- del sys.modules["versioneer"]
- # this fixes the "python setup.py develop" case (also 'install' and
- # 'easy_install .'), in which subdependencies of the main project are
- # built (using setup.py bdist_egg) in the same python process. Assume
- # a main project A and a dependency B, which use different versions
- # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
- # sys.modules by the time B's setup.py is executed, causing B to run
- # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
- sandbox that restores sys.modules to its pre-build state, so the
- # parent is protected against the child's "import versioneer". By
- # removing ourselves from sys.modules here, before the child build
- # happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52
-
- cmds = {} if cmdclass is None else cmdclass.copy()
-
- # we add "version" to setuptools
- from setuptools import Command
-
- class cmd_version(Command):
- description = "report generated version string"
- user_options: List[Tuple[str, str, str]] = []
- boolean_options: List[str] = []
-
- def initialize_options(self) -> None:
- pass
-
- def finalize_options(self) -> None:
- pass
-
- def run(self) -> None:
- vers = get_versions(verbose=True)
- print("Version: %s" % vers["version"])
- print(" full-revisionid: %s" % vers.get("full-revisionid"))
- print(" dirty: %s" % vers.get("dirty"))
- print(" date: %s" % vers.get("date"))
- if vers["error"]:
- print(" error: %s" % vers["error"])
- cmds["version"] = cmd_version
-
- # we override "build_py" in setuptools
- #
- # most invocation pathways end up running build_py:
- # distutils/build -> build_py
- # distutils/install -> distutils/build ->..
- # setuptools/bdist_wheel -> distutils/install ->..
- # setuptools/bdist_egg -> distutils/install_lib -> build_py
- # setuptools/install -> bdist_egg ->..
- # setuptools/develop -> ?
- # pip install:
- # copies source tree to a tempdir before running egg_info/etc
- # if .git isn't copied too, 'git describe' will fail
- # then does setup.py bdist_wheel, or sometimes setup.py install
- # setup.py egg_info -> ?
-
- # pip install -e . and setuptools/editable_wheel will invoke build_py
- # but the build_py command is not expected to copy any files.
-
- # we override different "build_py" commands for both environments
- if 'build_py' in cmds:
- _build_py: Any = cmds['build_py']
- else:
- from setuptools.command.build_py import build_py as _build_py
-
- class cmd_build_py(_build_py):
- def run(self) -> None:
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- _build_py.run(self)
- if getattr(self, "editable_mode", False):
- # During editable installs `.py` and data files are
- # not copied to build_lib
- return
- # now locate _version.py in the new build/ directory and replace
- # it with an updated value
- if cfg.versionfile_build:
- target_versionfile = os.path.join(self.build_lib,
- cfg.versionfile_build)
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
- cmds["build_py"] = cmd_build_py
-
- if 'build_ext' in cmds:
- _build_ext: Any = cmds['build_ext']
- else:
- from setuptools.command.build_ext import build_ext as _build_ext
-
- class cmd_build_ext(_build_ext):
- def run(self) -> None:
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- _build_ext.run(self)
- if self.inplace:
- # build_ext --inplace will only build extensions in
- # build/lib<..> dir with no _version.py to write to.
- # As in place builds will already have a _version.py
- # in the module dir, we do not need to write one.
- return
- # now locate _version.py in the new build/ directory and replace
- # it with an updated value
- if not cfg.versionfile_build:
- return
- target_versionfile = os.path.join(self.build_lib,
- cfg.versionfile_build)
- if not os.path.exists(target_versionfile):
- print(f"Warning: {target_versionfile} does not exist, skipping "
- "version update. This can happen if you are running build_ext "
- "without first running build_py.")
- return
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
- cmds["build_ext"] = cmd_build_ext
-
- if "cx_Freeze" in sys.modules: # cx_freeze enabled?
- from cx_Freeze.dist import build_exe as _build_exe # type: ignore
- # nczeczulin reports that py2exe won't like the pep440-style string
- # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
- # setup(console=[{
- # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
- # "product_version": versioneer.get_version(),
- # ...
-
- class cmd_build_exe(_build_exe):
- def run(self) -> None:
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _build_exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(LONG %
- {"DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- })
- cmds["build_exe"] = cmd_build_exe
- del cmds["build_py"]
-
- if 'py2exe' in sys.modules: # py2exe enabled?
- try:
- from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore
- except ImportError:
- from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore
-
- class cmd_py2exe(_py2exe):
- def run(self) -> None:
- root = get_root()
- cfg = get_config_from_root(root)
- versions = get_versions()
- target_versionfile = cfg.versionfile_source
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile, versions)
-
- _py2exe.run(self)
- os.unlink(target_versionfile)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(LONG %
- {"DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- })
- cmds["py2exe"] = cmd_py2exe
-
- # sdist farms its file list building out to egg_info
- if 'egg_info' in cmds:
- _egg_info: Any = cmds['egg_info']
- else:
- from setuptools.command.egg_info import egg_info as _egg_info
-
- class cmd_egg_info(_egg_info):
- def find_sources(self) -> None:
- # egg_info.find_sources builds the manifest list and writes it
- # in one shot
- super().find_sources()
-
- # Modify the filelist and normalize it
- root = get_root()
- cfg = get_config_from_root(root)
- self.filelist.append('versioneer.py')
- if cfg.versionfile_source:
- # There are rare cases where versionfile_source might not be
- # included by default, so we must be explicit
- self.filelist.append(cfg.versionfile_source)
- self.filelist.sort()
- self.filelist.remove_duplicates()
-
- # The write method is hidden in the manifest_maker instance that
- # generated the filelist and was thrown away
- # We will instead replicate their final normalization (to unicode,
- # and POSIX-style paths)
- from setuptools import unicode_utils
- normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/')
- for f in self.filelist.files]
-
- manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt')
- with open(manifest_filename, 'w') as fobj:
- fobj.write('\n'.join(normalized))
-
- cmds['egg_info'] = cmd_egg_info
-
- # we override different "sdist" commands for both environments
- if 'sdist' in cmds:
- _sdist: Any = cmds['sdist']
- else:
- from setuptools.command.sdist import sdist as _sdist
-
- class cmd_sdist(_sdist):
- def run(self) -> None:
- versions = get_versions()
- self._versioneer_generated_versions = versions
- # unless we update this, the command will keep using the old
- # version
- self.distribution.metadata.version = versions["version"]
- return _sdist.run(self)
-
- def make_release_tree(self, base_dir: str, files: List[str]) -> None:
- root = get_root()
- cfg = get_config_from_root(root)
- _sdist.make_release_tree(self, base_dir, files)
- # now locate _version.py in the new base_dir directory
- # (remembering that it may be a hardlink) and replace it with an
- # updated value
- target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
- print("UPDATING %s" % target_versionfile)
- write_to_version_file(target_versionfile,
- self._versioneer_generated_versions)
- cmds["sdist"] = cmd_sdist
-
- return cmds
-
-
-CONFIG_ERROR = """
-setup.cfg is missing the necessary Versioneer configuration. You need
-a section like:
-
- [versioneer]
- VCS = git
- style = pep440
- versionfile_source = src/myproject/_version.py
- versionfile_build = myproject/_version.py
- tag_prefix =
- parentdir_prefix = myproject-
-
-You will also need to edit your setup.py to use the results:
-
- import versioneer
- setup(version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(), ...)
-
-Please read the docstring in ./versioneer.py for configuration instructions,
-edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
-"""
-
-SAMPLE_CONFIG = """
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-#VCS = git
-#style = pep440
-#versionfile_source =
-#versionfile_build =
-#tag_prefix =
-#parentdir_prefix =
-
-"""
-
-OLD_SNIPPET = """
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-INIT_PY_SNIPPET = """
-from . import {0}
-__version__ = {0}.get_versions()['version']
-"""
-
-
-def do_setup() -> int:
- """Do main VCS-independent setup function for installing Versioneer."""
- root = get_root()
- try:
- cfg = get_config_from_root(root)
- except (OSError, configparser.NoSectionError,
- configparser.NoOptionError) as e:
- if isinstance(e, (OSError, configparser.NoSectionError)):
- print("Adding sample versioneer config to setup.cfg",
- file=sys.stderr)
- with open(os.path.join(root, "setup.cfg"), "a") as f:
- f.write(SAMPLE_CONFIG)
- print(CONFIG_ERROR, file=sys.stderr)
- return 1
-
- print(" creating %s" % cfg.versionfile_source)
- with open(cfg.versionfile_source, "w") as f:
- LONG = LONG_VERSION_PY[cfg.VCS]
- f.write(LONG % {"DOLLAR": "$",
- "STYLE": cfg.style,
- "TAG_PREFIX": cfg.tag_prefix,
- "PARENTDIR_PREFIX": cfg.parentdir_prefix,
- "VERSIONFILE_SOURCE": cfg.versionfile_source,
- })
-
- ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
- "__init__.py")
- maybe_ipy: Optional[str] = ipy
- if os.path.exists(ipy):
- try:
- with open(ipy, "r") as f:
- old = f.read()
- except OSError:
- old = ""
- module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
- snippet = INIT_PY_SNIPPET.format(module)
- if OLD_SNIPPET in old:
- print(" replacing boilerplate in %s" % ipy)
- with open(ipy, "w") as f:
- f.write(old.replace(OLD_SNIPPET, snippet))
- elif snippet not in old:
- print(" appending to %s" % ipy)
- with open(ipy, "a") as f:
- f.write(snippet)
- else:
- print(" %s unmodified" % ipy)
- else:
- print(" %s doesn't exist, ok" % ipy)
- maybe_ipy = None
-
- # Make VCS-specific changes. For git, this means creating/changing
- # .gitattributes to mark _version.py for export-subst keyword
- # substitution.
- do_vcs_install(cfg.versionfile_source, maybe_ipy)
- return 0
-
-
-def scan_setup_py() -> int:
- """Validate the contents of setup.py against Versioneer's expectations."""
- found = set()
- setters = False
- errors = 0
- with open("setup.py", "r") as f:
- for line in f.readlines():
- if "import versioneer" in line:
- found.add("import")
- if "versioneer.get_cmdclass()" in line:
- found.add("cmdclass")
- if "versioneer.get_version()" in line:
- found.add("get_version")
- if "versioneer.VCS" in line:
- setters = True
- if "versioneer.versionfile_source" in line:
- setters = True
- if len(found) != 3:
- print("")
- print("Your setup.py appears to be missing some important items")
- print("(but I might be wrong). Please make sure it has something")
- print("roughly like the following:")
- print("")
- print(" import versioneer")
- print(" setup( version=versioneer.get_version(),")
- print(" cmdclass=versioneer.get_cmdclass(), ...)")
- print("")
- errors += 1
- if setters:
- print("You should remove lines like 'versioneer.VCS = ' and")
- print("'versioneer.versionfile_source = ' . This configuration")
- print("now lives in setup.cfg, and should be removed from setup.py")
- print("")
- errors += 1
- return errors
-
-
-def setup_command() -> NoReturn:
- """Set up Versioneer and exit with appropriate error code."""
- errors = do_setup()
- errors += scan_setup_py()
- sys.exit(1 if errors else 0)
-
-
-if __name__ == "__main__":
- cmd = sys.argv[1]
- if cmd == "setup":
- setup_command()