From 491d8f06f42fd961c97d7962ad78e9921aa4bfad Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 19 Nov 2025 17:01:03 +0200 Subject: [PATCH 01/30] invoke extraction with long retries --- lib/idp_common_pkg/idp_common/extraction/agentic_idp.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py index f34ffc15..c563e03e 100644 --- a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py +++ b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py @@ -46,6 +46,9 @@ update_todo, view_todo_list, ) +from lib.idp_common_pkg.idp_common.utils.bedrock_utils import ( + async_exponential_backoff_retry, +) # Supported image formats for Bedrock API SUPPORTED_IMAGE_FORMATS = {"jpeg", "png", "gif", "webp"} From 3d38d1d9fa6fcca60dab7721aea9dd6c4b7d3fc5 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Thu, 20 Nov 2025 11:31:34 +0200 Subject: [PATCH 02/30] add review agent model config --- lib/idp_common_pkg/idp_common/config/models.py | 9 +++++++++ lib/idp_common_pkg/idp_common/extraction/agentic_idp.py | 3 --- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 0c80fc62..209b880e 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -132,6 +132,15 @@ def parse_int(cls, v: Any) -> int: if isinstance(v, str): return int(v) if v else 0 return int(v) + + @model_validator(mode="after") + def model_validator(self) -> Self: + + if not self.agentic.review_agent_model: + self.agentic.review_agent_model = self.model + + return self + @model_validator(mode="after") def set_default_review_agent_model(self) -> Self: diff --git a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py index c563e03e..f34ffc15 100644 --- a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py +++ b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py @@ -46,9 +46,6 @@ update_todo, view_todo_list, ) -from lib.idp_common_pkg.idp_common.utils.bedrock_utils import ( - async_exponential_backoff_retry, -) # Supported image formats for Bedrock API SUPPORTED_IMAGE_FORMATS = {"jpeg", "png", "gif", "webp"} From be4d16004de9d29893e0b9ae4f11bb1356e95dcf Mon Sep 17 00:00:00 2001 From: "Kazmer, Nagy-Betegh" Date: Wed, 26 Nov 2025 10:28:25 +0000 Subject: [PATCH 03/30] fixes --- lib/idp_common_pkg/idp_common/config/models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 209b880e..0c657763 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -132,10 +132,10 @@ def parse_int(cls, v: Any) -> int: if isinstance(v, str): return int(v) if v else 0 return int(v) - - @model_validator(mode="after") - def model_validator(self) -> Self: + @model_validator(mode="after") + def set_default_review_agent_model(self) -> Self: + """Set review_agent_model to extraction model if not specified.""" if not self.agentic.review_agent_model: self.agentic.review_agent_model = self.model From cbedb14a808698451b8056a245d4369ddfb275bc Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Thu, 20 Nov 2025 11:31:34 +0200 Subject: [PATCH 04/30] add review agent model config --- lib/idp_common_pkg/idp_common/config/models.py | 7 
+++++++ 1 file changed, 7 insertions(+) diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 0c657763..0d5f4bf8 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -133,6 +133,13 @@ def parse_int(cls, v: Any) -> int: return int(v) if v else 0 return int(v) + @model_validator(mode="after") + def model_validator(self) -> Self: + if not self.agentic.review_agent_model: + self.agentic.review_agent_model = self.model + + return self + @model_validator(mode="after") def set_default_review_agent_model(self) -> Self: """Set review_agent_model to extraction model if not specified.""" From 049c4fef2799ac4b3adce652fa5b2af345f518c0 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Tue, 18 Nov 2025 16:16:11 +0200 Subject: [PATCH 05/30] new assesment --- .../bank-statement-sample/config.yaml | 22 +- .../lending-package-sample/config.yaml | 24 +- .../config.yaml | 25 +- .../rvl-cdip-package-sample/config.yaml | 25 +- .../rvl-cdip-package-sample/config.yaml | 3 + idp_cli/pyproject.toml | 36 +- .../idp_common/assessment/granular_service.py | 1162 +++--------- .../idp_common/assessment/strands_executor.py | 244 +++ .../idp_common/assessment/strands_models.py | 76 + .../idp_common/assessment/strands_service.py | 391 ++++ .../idp_common/assessment/strands_tools.py | 161 ++ .../idp_common/config/models.py | 26 +- .../idp_common/extraction/models.py | 77 + .../idp_common/image/__init__.py | 154 +- .../idp_common/utils/grid_overlay.py | 387 ++++ lib/idp_common_pkg/pyproject.toml | 24 +- lib/idp_common_pkg/tests/conftest.py | 7 +- .../tests/unit/assessment/conftest.py | 24 + .../unit/config/test_configuration_sync.py | 11 +- .../tests/unit/test_granular_assessment.py | 415 ++--- lib/idp_common_pkg/uv.lock | 1576 +++-------------- .../src/assessment_function/index.py | 256 ++- .../src/assessment_function/index.py | 125 +- scripts/test_grid_overlay.py | 325 ++++ 24 files changed, 2743 insertions(+), 2833 deletions(-) create mode 100644 lib/idp_common_pkg/idp_common/assessment/strands_executor.py create mode 100644 lib/idp_common_pkg/idp_common/assessment/strands_models.py create mode 100644 lib/idp_common_pkg/idp_common/assessment/strands_service.py create mode 100644 lib/idp_common_pkg/idp_common/assessment/strands_tools.py create mode 100644 lib/idp_common_pkg/idp_common/utils/grid_overlay.py create mode 100644 lib/idp_common_pkg/tests/unit/assessment/conftest.py create mode 100644 scripts/test_grid_overlay.py diff --git a/config_library/pattern-2/bank-statement-sample/config.yaml b/config_library/pattern-2/bank-statement-sample/config.yaml index 3a8a2086..d345dca4 100644 --- a/config_library/pattern-2/bank-statement-sample/config.yaml +++ b/config_library/pattern-2/bank-statement-sample/config.yaml @@ -405,11 +405,7 @@ assessment: image: target_height: "" target_width: "" - granular: - enabled: true - max_workers: "20" - simple_batch_size: "3" - list_batch_size: "1" + max_workers: "20" default_confidence_threshold: "0.8" top_p: "0.0" max_tokens: "10000" @@ -462,12 +458,15 @@ assessment: - page: Page number where the field appears (starting from 1) Coordinate system: + - The document images have ruler markings along all edges showing the 0-1000 coordinate scale + - Use these ruler markings to determine precise coordinates for bounding boxes - Use normalized scale 0-1000 for both x and y axes - x1, y1 = top-left corner of bounding box - x2, y2 = bottom-right corner of bounding box - 
Ensure x2 > x1 and y2 > y1 - Make bounding boxes tight around the actual text content - If a field spans multiple lines, create a bounding box that encompasses all relevant text + - Reference the ruler markings on the image edges to provide accurate coordinates @@ -529,6 +528,9 @@ assessment: } IMPORTANT: + - The contains raw JSON schema for the attributes you should assess + - The contains the FULL extraction data (you have complete context for cross-referencing) + - The specifies which attributes to focus on - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations @@ -549,9 +551,13 @@ assessment: <> - - {ATTRIBUTE_NAMES_AND_DESCRIPTIONS} - + + {TASK_SCHEMA} + + + + {TASK_INSTRUCTION} + {EXTRACTION_RESULTS} diff --git a/config_library/pattern-2/lending-package-sample/config.yaml b/config_library/pattern-2/lending-package-sample/config.yaml index 463f8814..92928e1b 100644 --- a/config_library/pattern-2/lending-package-sample/config.yaml +++ b/config_library/pattern-2/lending-package-sample/config.yaml @@ -10,6 +10,7 @@ ocr: - name: LAYOUT - name: TABLES - name: SIGNATURES + max_workers: "20" image: target_width: "" target_height: "" @@ -1253,6 +1254,7 @@ extraction: agentic: enabled: false review_agent: false + max_workers: "20" image: target_width: "" target_height: "" @@ -1426,14 +1428,10 @@ summarization: assessment: enabled: true validation_enabled: false + max_workers: "20" image: target_height: "" target_width: "" - granular: - enabled: true - max_workers: "20" - simple_batch_size: "3" - list_batch_size: "1" default_confidence_threshold: "0.8" top_p: "0.0" max_tokens: "10000" @@ -1486,12 +1484,15 @@ assessment: - page: Page number where the field appears (starting from 1) Coordinate system: + - The document images have ruler markings along all edges showing the 0-1000 coordinate scale + - Use these ruler markings to determine precise coordinates for bounding boxes - Use normalized scale 0-1000 for both x and y axes - x1, y1 = top-left corner of bounding box - x2, y2 = bottom-right corner of bounding box - Ensure x2 > x1 and y2 > y1 - Make bounding boxes tight around the actual text content - If a field spans multiple lines, create a bounding box that encompasses all relevant text + - Reference the ruler markings on the image edges to provide accurate coordinates @@ -1553,6 +1554,9 @@ assessment: } IMPORTANT: + - The contains raw JSON schema for the attributes you should assess + - The contains the FULL extraction data (you have complete context for cross-referencing) + - The specifies which attributes to focus on - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations @@ -1573,9 +1577,13 @@ assessment: <> - - {ATTRIBUTE_NAMES_AND_DESCRIPTIONS} - + + {TASK_SCHEMA} + + + + {TASK_INSTRUCTION} + {EXTRACTION_RESULTS} diff --git a/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml b/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml index c725f83b..5009d75a 100644 --- 
a/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml +++ b/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml @@ -11,6 +11,7 @@ ocr: - name: LAYOUT - name: TABLES - name: SIGNATURES + max_workers: "20" image: dpi: "150" target_width: "" @@ -809,6 +810,7 @@ classes: classification: maxPagesForClassification: "ALL" + max_workers: "20" image: target_height: "" target_width: "" @@ -868,6 +870,7 @@ classification: 5. Outputting in the exact JSON format specified in extraction: + max_workers: "20" image: target_height: "" target_width: "" @@ -973,14 +976,10 @@ extraction: assessment: enabled: true validation_enabled: false + max_workers: "20" image: target_height: "" target_width: "" - granular: - enabled: true - max_workers: "20" - simple_batch_size: "3" - list_batch_size: "1" default_confidence_threshold: "0.8" top_p: "0.0" max_tokens: "10000" @@ -1033,12 +1032,15 @@ assessment: - page: Page number where the field appears (starting from 1) Coordinate system: + - The document images have ruler markings along all edges showing the 0-1000 coordinate scale + - Use these ruler markings to determine precise coordinates for bounding boxes - Use normalized scale 0-1000 for both x and y axes - x1, y1 = top-left corner of bounding box - x2, y2 = bottom-right corner of bounding box - Ensure x2 > x1 and y2 > y1 - Make bounding boxes tight around the actual text content - If a field spans multiple lines, create a bounding box that encompasses all relevant text + - Reference the ruler markings on the image edges to provide accurate coordinates @@ -1100,6 +1102,9 @@ assessment: } IMPORTANT: + - The contains raw JSON schema for the attributes you should assess + - The contains the FULL extraction data (you have complete context for cross-referencing) + - The specifies which attributes to focus on - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations @@ -1120,9 +1125,13 @@ assessment: <> - - {ATTRIBUTE_NAMES_AND_DESCRIPTIONS} - + + {TASK_SCHEMA} + + + + {TASK_INSTRUCTION} + {EXTRACTION_RESULTS} diff --git a/config_library/pattern-2/rvl-cdip-package-sample/config.yaml b/config_library/pattern-2/rvl-cdip-package-sample/config.yaml index 11b38ab0..f7720b03 100644 --- a/config_library/pattern-2/rvl-cdip-package-sample/config.yaml +++ b/config_library/pattern-2/rvl-cdip-package-sample/config.yaml @@ -10,6 +10,7 @@ ocr: - name: LAYOUT - name: TABLES - name: SIGNATURES + max_workers: "20" image: dpi: "150" target_width: "" @@ -764,6 +765,7 @@ classes: labeled 'notes', 'remarks', or 'comments'. 
classification: maxPagesForClassification: "ALL" + max_workers: "20" image: target_height: "" target_width: "" @@ -907,6 +909,7 @@ classification: classificationMethod: textbasedHolisticClassification sectionSplitting: llm_determined extraction: + max_workers: "20" image: target_width: "" target_height: "" @@ -1068,14 +1071,10 @@ summarization: assessment: enabled: true validation_enabled: false + max_workers: "20" image: target_height: "" target_width: "" - granular: - enabled: true - max_workers: "20" - simple_batch_size: "3" - list_batch_size: "1" default_confidence_threshold: "0.8" top_p: "0.0" max_tokens: "10000" @@ -1128,12 +1127,15 @@ assessment: - page: Page number where the field appears (starting from 1) Coordinate system: + - The document images have ruler markings along all edges showing the 0-1000 coordinate scale + - Use these ruler markings to determine precise coordinates for bounding boxes - Use normalized scale 0-1000 for both x and y axes - x1, y1 = top-left corner of bounding box - x2, y2 = bottom-right corner of bounding box - Ensure x2 > x1 and y2 > y1 - Make bounding boxes tight around the actual text content - If a field spans multiple lines, create a bounding box that encompasses all relevant text + - Reference the ruler markings on the image edges to provide accurate coordinates @@ -1195,6 +1197,9 @@ assessment: } IMPORTANT: + - The contains raw JSON schema for the attributes you should assess + - The contains the FULL extraction data (you have complete context for cross-referencing) + - The specifies which attributes to focus on - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations @@ -1215,9 +1220,13 @@ assessment: <> - - {ATTRIBUTE_NAMES_AND_DESCRIPTIONS} - + + {TASK_SCHEMA} + + + + {TASK_INSTRUCTION} + {EXTRACTION_RESULTS} diff --git a/config_library/pattern-3/rvl-cdip-package-sample/config.yaml b/config_library/pattern-3/rvl-cdip-package-sample/config.yaml index a6306e94..2ab91dba 100644 --- a/config_library/pattern-3/rvl-cdip-package-sample/config.yaml +++ b/config_library/pattern-3/rvl-cdip-package-sample/config.yaml @@ -11,6 +11,7 @@ ocr: - name: LAYOUT - name: TABLES - name: SIGNATURES + max_workers: "20" image: dpi: "150" target_width: "" @@ -766,6 +767,7 @@ classes: classification: model: Custom fine tuned UDOP model extraction: + max_workers: "20" image: target_width: "" target_height: "" @@ -926,6 +928,7 @@ summarization: You are a document summarization expert who can analyze and summarize documents from various domains including medical, financial, legal, and general business documents. Your task is to create a summary that captures the key information, main points, and important details from the document. Your output must be in valid JSON format. \nSummarization Style: Balanced\\nCreate a balanced summary that provides a moderate level of detail. Include the main points and key supporting information, while maintaining the document's overall structure. Aim for a comprehensive yet concise summary.\n Your output MUST be in valid JSON format with markdown content. You MUST strictly adhere to the output format specified in the instructions. 
assessment: enabled: true + max_workers: "20" image: target_height: "" target_width: "" diff --git a/idp_cli/pyproject.toml b/idp_cli/pyproject.toml index 03836a16..9e2130fd 100644 --- a/idp_cli/pyproject.toml +++ b/idp_cli/pyproject.toml @@ -9,32 +9,28 @@ build-backend = "setuptools.build_meta" name = "idp-cli" version = "0.4.5" description = "Command-line interface for IDP Accelerator batch document processing" -authors = [{name = "AWS"}] -license = {text = "MIT-0"} -requires-python = ">=3.10" +authors = [{ name = "AWS" }] +license = { text = "MIT-0" } +requires-python = ">=3.12" classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ - "click>=8.1.0", - "rich>=13.0.0", - "boto3>=1.28.0", - "botocore>=1.31.0", + "click>=8.1.0", + "rich>=13.0.0", + "boto3>=1.28.0", + "botocore>=1.31.0", ] [project.optional-dependencies] -test = [ - "pytest>=7.4.0", - "pytest-mock>=3.11.0", - "moto>=4.2.0", -] +test = ["pytest>=7.4.0", "pytest-mock>=3.11.0", "moto>=4.2.0"] [project.scripts] idp-cli = "idp_cli.cli:main" diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index bcf5440c..ccddbe67 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -2,27 +2,26 @@ # SPDX-License-Identifier: MIT-0 """ -Granular assessment service for evaluating document extraction confidence using LLMs. +Granular assessment service for evaluating document extraction confidence using Strands agents. -This module provides a more scalable approach to assessment by: -1. Breaking down assessments into smaller, focused inferences -2. Leveraging prompt caching to reduce costs -3. Using multi-threading for parallel processing -4. Adapting batch sizes based on attribute complexity +This module provides a scalable approach to assessment by: +1. Breaking down assessments into single-field focused tasks +2. Leveraging Strands agents with tool-based interaction +3. Using parallel async execution for performance +4. 
Maintaining assessment structure that mirrors extraction results """ import json import logging import os import time -from concurrent.futures import ThreadPoolExecutor, as_completed from dataclasses import dataclass -from typing import Any, Dict, Generator, List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple, Union -from idp_common import bedrock, image, metrics, s3, utils +from idp_common import image, metrics, s3, utils +from idp_common.assessment.strands_executor import execute_assessment_tasks_parallel from idp_common.config.models import IDPConfig from idp_common.config.schema_constants import ( - SCHEMA_DESCRIPTION, SCHEMA_ITEMS, SCHEMA_PROPERTIES, SCHEMA_TYPE, @@ -30,24 +29,35 @@ TYPE_OBJECT, X_AWS_IDP_CONFIDENCE_THRESHOLD, X_AWS_IDP_DOCUMENT_TYPE, - X_AWS_IDP_LIST_ITEM_DESCRIPTION, ) from idp_common.models import Document, Status -from idp_common.utils import check_token_limit, extract_json_from_text +from idp_common.utils import check_token_limit, grid_overlay logger = logging.getLogger(__name__) @dataclass class AssessmentTask: - """Represents a single assessment task to be processed.""" + """Single-field assessment task for Strands executor.""" task_id: str - task_type: str # 'simple_batch', 'group', 'list_item' - attributes: List[str] # Attribute names to assess - extraction_data: Dict[str, Any] # Relevant extraction data - confidence_thresholds: Dict[str, float] # Attribute -> threshold mapping - list_item_index: Optional[int] = None # For list items + task_type: str # Always "attribute" - single field assessment + + # Path to field as tuple: ("address", "street") or ("items", 0, "price") + field_path: Tuple[Union[str, int], ...] + + # The field name being assessed (last element of path) + field_name: str + + # Schema for this specific field only + field_schema: Dict[str, Any] + + # Confidence threshold for this field + confidence_threshold: float + + # Direct reference to parent container in assessment structure (for O(1) insertion) + # Can be Dict for regular fields or List for array items + parent_assessment_dict: Union[Dict[str, Any], List[Any]] @dataclass @@ -102,6 +112,42 @@ def _safe_float_conversion(value: Any, default: float = 0.0) -> float: return default +def _get_value_at_path(data: Dict[str, Any], path: Tuple[Union[str, int], ...]) -> Any: + """ + Navigate nested data structure using tuple path. 
+ + Args: + data: Nested dictionary/list structure + path: Tuple of keys/indices like ("address", "street") or ("items", 0, "price") + + Returns: + Value at the specified path, or None if path doesn't exist + + Examples: + >>> data = {"address": {"street": "123 Main St"}} + >>> _get_value_at_path(data, ("address", "street")) + "123 Main St" + + >>> data = {"items": [{"price": 10.99}, {"price": 20.99}]} + >>> _get_value_at_path(data, ("items", 0, "price")) + 10.99 + """ + current = data + for key in path: + if current is None: + return None + if isinstance(current, dict): + current = current.get(key) + elif isinstance(current, list): + if isinstance(key, int) and 0 <= key < len(current): + current = current[key] + else: + return None + else: + return None + return current + + class GranularAssessmentService: """Enhanced assessment service with granular, cached, and parallel processing.""" @@ -130,18 +176,13 @@ def __init__( self.config = config_model self.region = region or os.environ.get("AWS_REGION") - # Granular processing configuration (type-safe access, Pydantic handles conversions) - self.max_workers = self.config.assessment.granular.max_workers - self.simple_batch_size = self.config.assessment.granular.simple_batch_size - self.list_batch_size = self.config.assessment.granular.list_batch_size + # Assessment configuration (granular is now always enabled) + self.max_workers = self.config.assessment.max_workers - # Ensure safe minimum values + # Ensure safe minimum value self.max_workers = max(1, self.max_workers) - self.simple_batch_size = max(1, self.simple_batch_size) - self.list_batch_size = max(1, self.list_batch_size) - # Auto-determine caching and parallel processing - # Caching is automatically handled by the bedrock client based on model support + # Auto-determine parallel processing # Parallel processing is enabled when max_workers > 1 self.enable_parallel = self.max_workers > 1 @@ -172,9 +213,7 @@ def __init__( model_id = self.config.assessment.model logger.info(f"Initialized granular assessment service with model {model_id}") logger.info( - f"Granular config: max_workers={self.max_workers}, " - f"simple_batch_size={self.simple_batch_size}, " - f"list_batch_size={self.list_batch_size}, " + f"Assessment config: max_workers={self.max_workers}, " f"parallel={self.enable_parallel}, " f"caching={'enabled' if self.cache_table else 'disabled'}" ) @@ -196,81 +235,6 @@ def _get_class_schema(self, class_label: str) -> Dict[str, Any]: return schema return {} - def _walk_properties_for_assessment( - self, properties: Dict[str, Any], parent_path: str = "" - ) -> Generator[Dict[str, Any], None, None]: - """ - Walk JSON Schema properties and yield assessment property information. - Generator pattern for efficient schema traversal. 
- - Args: - properties: JSON Schema properties dict - parent_path: Parent path for nested properties (e.g., "CompanyAddress") - - Yields: - Dict containing property information: - { - 'path': 'CompanyAddress.Street', # Full path - 'name': 'Street', # Property name - 'parent_path': 'CompanyAddress', # Parent path (empty string for top-level) - 'type': 'string', # JSON Schema type - 'description': 'Street address', - 'confidence_threshold': 0.9, # From x-aws-idp-confidence-threshold - 'prop_schema': {...} # Full property schema for reference - } - """ - for prop_name, prop_schema in properties.items(): - prop_type = prop_schema.get(SCHEMA_TYPE) - full_path = f"{parent_path}.{prop_name}" if parent_path else prop_name - - # Get confidence threshold for this property - threshold = prop_schema.get(X_AWS_IDP_CONFIDENCE_THRESHOLD) - - if prop_type == TYPE_OBJECT: - # Yield info for the group itself - yield { - "path": full_path, - "name": prop_name, - "parent_path": parent_path, - "type": TYPE_OBJECT, - "description": prop_schema.get(SCHEMA_DESCRIPTION, ""), - "confidence_threshold": threshold, - "prop_schema": prop_schema, - } - # Recurse into nested object properties - yield from self._walk_properties_for_assessment( - prop_schema.get(SCHEMA_PROPERTIES, {}), full_path - ) - - elif prop_type == TYPE_ARRAY: - # Yield info for the list itself - yield { - "path": full_path, - "name": prop_name, - "parent_path": parent_path, - "type": TYPE_ARRAY, - "description": prop_schema.get(SCHEMA_DESCRIPTION, ""), - "confidence_threshold": threshold, - "list_item_description": prop_schema.get( - X_AWS_IDP_LIST_ITEM_DESCRIPTION, "" - ), - "prop_schema": prop_schema, - } - # Note: We don't recurse into array items here because list items - # are handled specially in task creation (one task per item) - - else: - # Leaf property (simple type: string, number, boolean, etc.) - yield { - "path": full_path, - "name": prop_name, - "parent_path": parent_path, - "type": prop_type or "string", - "description": prop_schema.get(SCHEMA_DESCRIPTION, ""), - "confidence_threshold": threshold, - "prop_schema": prop_schema, - } - def _get_confidence_threshold_by_path( self, properties: Dict[str, Any], path: str, default: float = 0.9 ) -> float: @@ -315,294 +279,19 @@ def _get_confidence_threshold_by_path( return default - def _format_property_descriptions( - self, properties: Dict[str, Any], filter_names: Optional[List[str]] = None - ) -> str: - """ - Format property descriptions from JSON Schema properties for the prompt. - Can optionally filter to specific property names. 
- - Args: - properties: JSON Schema properties dict - filter_names: Optional list of property names to include (None = all) - - Returns: - Formatted property descriptions as a string - """ - formatted_lines = [] - - for prop_name, prop_schema in properties.items(): - # Skip if filtering and this property is not in the filter list - if filter_names is not None and prop_name not in filter_names: - continue - - prop_type = prop_schema.get(SCHEMA_TYPE) - description = prop_schema.get(SCHEMA_DESCRIPTION, "") - - if prop_type == TYPE_OBJECT: - formatted_lines.append(f"{prop_name} \t[ {description} ]") - nested_props = prop_schema.get(SCHEMA_PROPERTIES, {}) - for nested_name, nested_schema in nested_props.items(): - nested_desc = nested_schema.get(SCHEMA_DESCRIPTION, "") - formatted_lines.append(f" - {nested_name} \t[ {nested_desc} ]") - - elif prop_type == TYPE_ARRAY: - formatted_lines.append(f"{prop_name} \t[ {description} ]") - items_schema = prop_schema.get(SCHEMA_ITEMS, {}) - - item_desc = prop_schema.get(X_AWS_IDP_LIST_ITEM_DESCRIPTION, "") - if item_desc: - formatted_lines.append(f" Each item: {item_desc}") - - if items_schema.get(SCHEMA_TYPE) == TYPE_OBJECT: - item_props = items_schema.get(SCHEMA_PROPERTIES, {}) - for item_name, item_schema in item_props.items(): - item_prop_desc = item_schema.get(SCHEMA_DESCRIPTION, "") - formatted_lines.append( - f" - {item_name} \t[ {item_prop_desc} ]" - ) - else: - formatted_lines.append(f"{prop_name} \t[ {description} ]") - - return "\n".join(formatted_lines) - - def _get_attribute_confidence_threshold( - self, attr_name: str, attributes: List[Dict[str, Any]], default_threshold: float - ) -> float: - """ - Get confidence threshold (legacy format, for internal granular service use). - - Args: - attr_name: Name of the attribute - attributes: List of attribute dicts in legacy format - default_threshold: Default threshold if not found - - Returns: - Confidence threshold for the attribute - """ - for attr in attributes: - if attr.get("name") == attr_name: - return _safe_float_conversion( - attr.get("confidence_threshold", default_threshold), - default_threshold, - ) - - if attr.get("attributeType") == "group": - group_attributes = attr.get("groupAttributes", []) - for group_attr in group_attributes: - if group_attr.get("name") == attr_name: - return _safe_float_conversion( - group_attr.get("confidence_threshold", default_threshold), - default_threshold, - ) - - if attr.get("attributeType") == "list": - list_template = attr.get("listItemTemplate", {}) - item_attributes = list_template.get("itemAttributes", []) - for item_attr in item_attributes: - if item_attr.get("name") == attr_name: - return _safe_float_conversion( - item_attr.get("confidence_threshold", default_threshold), - default_threshold, - ) - - return default_threshold - - def _build_cached_prompt_base( - self, - document_text: str, - class_label: str, - attribute_descriptions: str, - ocr_text_confidence: str, - page_images: List[Any], - ) -> List[Dict[str, Any]]: - """ - Build the cacheable base portion of the assessment prompt using the configured task_prompt template. - This will be the same for all tasks and can be cached. 
- - Args: - document_text: The document text content - class_label: The document class label - attribute_descriptions: Formatted attribute names and descriptions (will be replaced per task) - ocr_text_confidence: Raw OCR results with confidence scores - page_images: List of page images - - Returns: - List of content items for the cacheable portion - """ - # Get the base task prompt template (type-safe access) - task_prompt_template = self.config.assessment.task_prompt - - if not task_prompt_template: - raise ValueError( - "Assessment task_prompt is required in configuration but not found" - ) - - # For granular assessment, we need to build the base content that will be cached - # and leave placeholders for task-specific content - - # Replace common placeholders but leave task-specific ones - base_substitutions = { - "DOCUMENT_TEXT": document_text, - "DOCUMENT_CLASS": class_label, - "OCR_TEXT_CONFIDENCE": ocr_text_confidence, - } - - # Replace placeholders in the template - base_prompt = task_prompt_template - for placeholder, value in base_substitutions.items(): - base_prompt = base_prompt.replace(f"{{{placeholder}}}", value) - - # Handle {DOCUMENT_IMAGE} placeholder if present - if "{DOCUMENT_IMAGE}" in base_prompt: - # Split the prompt at the DOCUMENT_IMAGE placeholder - parts = base_prompt.split("{DOCUMENT_IMAGE}") - if len(parts) != 2: - raise ValueError( - f"Invalid DOCUMENT_IMAGE placeholder usage: found {len(parts) - 1} occurrences, " - f"but exactly 1 is required." - ) - - content = [] - - # Add the part before the image - if parts[0].strip(): - content.append({"text": parts[0]}) - - # Add the images if available - if page_images: - if isinstance(page_images, list): - # Multiple images - no limit with latest Bedrock API - logger.info( - f"Attaching {len(page_images)} images to granular assessment prompt" - ) - for img in page_images: - content.append(image.prepare_bedrock_image_attachment(img)) - else: - # Single image - content.append(image.prepare_bedrock_image_attachment(page_images)) - - # Add the part after the image - if parts[1].strip(): - content.append({"text": parts[1]}) - - else: - # No DOCUMENT_IMAGE placeholder - just add the base prompt - content = [] - if base_prompt.strip(): - content.append({"text": base_prompt}) - - return content - - def _get_task_specific_attribute_descriptions( - self, task: AssessmentTask, properties: Dict[str, Any] - ) -> str: - """ - Get attribute descriptions specific to this task using JSON Schema properties. 
- - Args: - task: The assessment task - properties: JSON Schema properties dict - - Returns: - Formatted attribute descriptions for this specific task - """ - if task.task_type == "simple_batch": - # For simple batches, filter to only the attributes in this batch - return self._format_property_descriptions( - properties, filter_names=task.attributes - ) - - elif task.task_type == "group": - # For groups, filter to just the group attribute (which includes nested props) - group_attr_name = task.attributes[0] - return self._format_property_descriptions( - properties, filter_names=[group_attr_name] - ) - - elif task.task_type == "list_item": - # For list items, show the item schema properties - list_attr_name = task.attributes[0] - if list_attr_name in properties: - list_prop_schema = properties[list_attr_name] - items_schema = list_prop_schema.get(SCHEMA_ITEMS, {}) - if items_schema.get(SCHEMA_TYPE) == TYPE_OBJECT: - item_properties = items_schema.get(SCHEMA_PROPERTIES, {}) - return self._format_property_descriptions(item_properties) - return "" - - return "" - - def _build_specific_assessment_prompt( - self, - task: AssessmentTask, - base_content: List[Dict[str, Any]], - properties: Dict[str, Any], - ) -> List[Dict[str, Any]]: - """ - Build the specific assessment prompt for a task by replacing the {EXTRACTION_RESULTS} placeholder - in the base content with task-specific extraction data. - - Args: - task: The assessment task - base_content: The cached base content (which has empty {EXTRACTION_RESULTS}) - properties: JSON Schema properties dict for task-specific filtering - - Returns: - Complete content list for the assessment - """ - # Build extraction results for this specific task - task_extraction_data = {} - for attr_name in task.attributes: - if attr_name in task.extraction_data: - task_extraction_data[attr_name] = task.extraction_data[attr_name] - - # For list items, we need to handle the data differently - if task.task_type == "list_item": - extraction_results_str = json.dumps(task.extraction_data, indent=2) - item_index = task.list_item_index if task.list_item_index is not None else 0 - extraction_results_str = f"Item #{item_index + 1}: {extraction_results_str}" - else: - extraction_results_str = json.dumps(task_extraction_data, indent=2) - - # Get task-specific attribute descriptions - task_specific_attributes = self._get_task_specific_attribute_descriptions( - task, properties - ) - - # Create a new content list by replacing placeholders in the base content - content = [] - for item in base_content: - if "text" in item: - # Replace any remaining placeholders in the text - text = item["text"] - - # Replace EXTRACTION_RESULTS placeholder with task-specific data - text = text.replace("{EXTRACTION_RESULTS}", extraction_results_str) - - # Replace ATTRIBUTE_NAMES_AND_DESCRIPTIONS with task-specific attributes if needed - if "{ATTRIBUTE_NAMES_AND_DESCRIPTIONS}" in text: - text = text.replace( - "{ATTRIBUTE_NAMES_AND_DESCRIPTIONS}", task_specific_attributes - ) - - # Only add non-empty text content (must have actual content, not just whitespace) - if text.strip(): - content.append({"text": text}) - else: - # Non-text content (like images, cache points) - pass through unchanged - content.append(item) - - return content - def _create_assessment_tasks( self, extraction_results: Dict[str, Any], properties: Dict[str, Any], default_confidence_threshold: float, - ) -> List[AssessmentTask]: + ) -> Tuple[List[AssessmentTask], Dict[str, Any]]: """ - Create assessment tasks based on JSON Schema 
property types and extraction results. + Create assessment tasks and pre-build assessment structure. + + This function recursively traverses the schema and extraction results to: + 1. Build an assessment structure that mirrors the extraction results + 2. Create one task per leaf field (no batching - one field at a time) + 3. Store direct parent dict references in tasks for O(1) insertion Args: extraction_results: The extraction results to assess @@ -610,349 +299,119 @@ def _create_assessment_tasks( default_confidence_threshold: Default confidence threshold Returns: - List of assessment tasks + Tuple of (tasks, assessment_structure) + - tasks: List of AssessmentTask objects + - assessment_structure: Dict mirroring extraction_results shape """ - tasks = [] - task_counter = 0 + tasks: List[AssessmentTask] = [] + assessment_structure: Dict[str, Any] = {} + task_counter = [0] # Use list for mutable counter in nested function + + def _traverse( + schema_props: Dict[str, Any], + extraction_data: Dict[str, Any], + current_path: Tuple[Union[str, int], ...], + parent_dict: Dict[str, Any], + ) -> None: + """ + Recursively traverse schema and extraction data to build tasks and structure. + + Args: + schema_props: Current level schema properties + extraction_data: Current level extraction data + current_path: Tuple path to current location + parent_dict: Parent dict in assessment structure for insertion + """ + for prop_name, prop_schema in schema_props.items(): + if prop_name not in extraction_data: + continue # Skip properties not in extraction results - # Group properties by type for efficient processing - simple_props = [] - group_props = [] - list_props = [] + prop_type = prop_schema.get(SCHEMA_TYPE) + prop_value = extraction_data[prop_name] + field_path = current_path + (prop_name,) - for prop_name, prop_schema in properties.items(): - if prop_name not in extraction_results: - continue # Skip properties not in extraction results + if prop_type == TYPE_OBJECT and isinstance(prop_value, dict): + # Create nested dict in assessment structure + nested_dict: Dict[str, Any] = {} + parent_dict[prop_name] = nested_dict - prop_type = prop_schema.get(SCHEMA_TYPE) + # Recurse into nested object + nested_props = prop_schema.get(SCHEMA_PROPERTIES, {}) + _traverse(nested_props, prop_value, field_path, nested_dict) - if prop_type == TYPE_OBJECT: - group_props.append((prop_name, prop_schema)) - elif prop_type == TYPE_ARRAY: - list_props.append((prop_name, prop_schema)) - else: - # Simple types: string, number, boolean, etc. 
- simple_props.append((prop_name, prop_schema)) - - # Create tasks for simple properties (batch them) - for i in range(0, len(simple_props), self.simple_batch_size): - batch = simple_props[i : i + self.simple_batch_size] - prop_names = [name for name, _ in batch] - - # Build confidence thresholds for this batch - confidence_thresholds = {} - for prop_name, prop_schema in batch: - threshold = self._get_confidence_threshold_by_path( - properties, prop_name, default_confidence_threshold - ) - confidence_thresholds[prop_name] = threshold + elif prop_type == TYPE_ARRAY and isinstance(prop_value, list): + # Create list in assessment structure + assessment_list: List[Any] = [] + parent_dict[prop_name] = assessment_list - # Extract relevant data for this batch - batch_extraction_data = { - name: extraction_results[name] - for name in prop_names - if name in extraction_results - } + # Process each array item + items_schema = prop_schema.get(SCHEMA_ITEMS, {}) + item_type = items_schema.get(SCHEMA_TYPE) - task = AssessmentTask( - task_id=f"simple_batch_{task_counter}", - task_type="simple_batch", - attributes=prop_names, - extraction_data=batch_extraction_data, - confidence_thresholds=confidence_thresholds, - ) - tasks.append(task) - task_counter += 1 - - # Create tasks for group properties (one per group) - for prop_name, prop_schema in group_props: - # Build confidence thresholds for nested properties - confidence_thresholds = {} - nested_props = prop_schema.get(SCHEMA_PROPERTIES, {}) - for nested_name in nested_props.keys(): - nested_path = f"{prop_name}.{nested_name}" - threshold = self._get_confidence_threshold_by_path( - properties, nested_path, default_confidence_threshold - ) - confidence_thresholds[nested_name] = threshold - - task = AssessmentTask( - task_id=f"group_{task_counter}", - task_type="group", - attributes=[prop_name], - extraction_data={prop_name: extraction_results[prop_name]}, - confidence_thresholds=confidence_thresholds, - ) - tasks.append(task) - task_counter += 1 + for idx, item_value in enumerate(prop_value): + item_path = field_path + (idx,) - # Create tasks for list properties (one per list item) - for prop_name, prop_schema in list_props: - list_data = extraction_results.get(prop_name, []) + if item_type == TYPE_OBJECT and isinstance(item_value, dict): + # Create dict for this array item + item_dict: Dict[str, Any] = {} + assessment_list.append(item_dict) - if not isinstance(list_data, list): - logger.warning(f"List property {prop_name} is not a list, skipping") - continue + # Recurse into array item properties + item_props = items_schema.get(SCHEMA_PROPERTIES, {}) + _traverse(item_props, item_value, item_path, item_dict) - # Build confidence thresholds for list item properties - confidence_thresholds = {} - items_schema = prop_schema.get(SCHEMA_ITEMS, {}) - if items_schema.get(SCHEMA_TYPE) == TYPE_OBJECT: - item_props = items_schema.get(SCHEMA_PROPERTIES, {}) - for item_prop_name in item_props.keys(): - # For list items, the path includes the list name - item_path = f"{prop_name}.{item_prop_name}" - threshold = self._get_confidence_threshold_by_path( - properties, item_path, default_confidence_threshold - ) - confidence_thresholds[item_prop_name] = threshold + else: + # Leaf field in array - create task + threshold = self._get_confidence_threshold_by_path( + properties, + ".".join(str(p) for p in field_path), + default_confidence_threshold, + ) + + task = AssessmentTask( + task_id=f"task_{task_counter[0]}", + task_type="attribute", + field_path=item_path, + 
field_name=prop_name, + field_schema=items_schema, + confidence_threshold=threshold, + parent_assessment_dict=assessment_list, # type: ignore + ) + tasks.append(task) + task_counter[0] += 1 - # Create tasks for list items (batch them if configured) - for i in range(0, len(list_data), self.list_batch_size): - batch_end = min(i + self.list_batch_size, len(list_data)) + # Pre-allocate slot in list (will be replaced by assessment obj) + assessment_list.append(None) - for j in range(i, batch_end): - item_data = list_data[j] + else: + # Leaf field - create task + threshold = self._get_confidence_threshold_by_path( + properties, + ".".join(str(p) for p in field_path), + default_confidence_threshold, + ) task = AssessmentTask( - task_id=f"list_{prop_name}_item_{j}", - task_type="list_item", - attributes=[prop_name], - extraction_data=item_data, - confidence_thresholds=confidence_thresholds, - list_item_index=j, + task_id=f"task_{task_counter[0]}", + task_type="attribute", + field_path=field_path, + field_name=prop_name, + field_schema=prop_schema, + confidence_threshold=threshold, + parent_assessment_dict=parent_dict, ) tasks.append(task) - task_counter += 1 - - logger.info( - f"Created {len(tasks)} assessment tasks: " - f"{len([t for t in tasks if t.task_type == 'simple_batch'])} simple batches, " - f"{len([t for t in tasks if t.task_type == 'group'])} groups, " - f"{len([t for t in tasks if t.task_type == 'list_item'])} list items" - ) - - return tasks - - def _process_assessment_task( - self, - task: AssessmentTask, - base_content: List[Dict[str, Any]], - properties: Dict[str, Any], - model_id: str, - system_prompt: str, - temperature: float, - top_k: float, - top_p: float, - max_tokens: Optional[int], - ) -> AssessmentResult: - """ - Process a single assessment task. 
- - Args: - task: The assessment task to process - base_content: The cached base content - properties: JSON Schema properties dict - model_id: Bedrock model ID - system_prompt: System prompt - temperature: Temperature parameter - top_k: Top-k parameter - top_p: Top-p parameter - max_tokens: Max tokens parameter - - Returns: - Assessment result - """ - start_time = time.time() - - try: - # Build the complete prompt - content = self._build_specific_assessment_prompt( - task, base_content, properties - ) - - logger.debug( - f"Processing assessment task {task.task_id} with {len(task.attributes)} attributes" - ) - - # Invoke Bedrock - response_with_metering = bedrock.invoke_model( - model_id=model_id, - system_prompt=system_prompt, - content=content, - temperature=temperature, - top_k=top_k, - top_p=top_p, - max_tokens=max_tokens, - context="GranularAssessment", - ) - - # Extract text from response - assessment_text = bedrock.extract_text_from_response(response_with_metering) - metering = response_with_metering.get("metering", {}) - - # Parse response into JSON - assessment_data = {} - task_failed = False - error_messages = [] - try: - assessment_data = json.loads(extract_json_from_text(assessment_text)) - except Exception as e: - logger.error( - f"Error parsing assessment LLM output for task {task.task_id}: {e}" - ) - task_failed = True - error_messages.append( - f"Error parsing assessment LLM output for task {task.task_id}" - ) - # Create default assessments - for attr_name in task.attributes: - if task.task_type == "list_item": - # For list items, create assessments for each sub-attribute - assessment_data = {} - for ( - sub_attr_name, - threshold, - ) in task.confidence_thresholds.items(): - assessment_data[sub_attr_name] = { - "confidence": 0.5, - "confidence_reason": f"Unable to parse assessment response for {sub_attr_name} - default score assigned", - } - else: - assessment_data[attr_name] = { - "confidence": 0.5, - "confidence_reason": f"Unable to parse assessment response for {attr_name} - default score assigned", - } - - # Process bounding boxes automatically if bbox data is present - try: - logger.debug( - f"Checking for bounding box data in granular assessment task {task.task_id}" - ) - assessment_data = self._extract_geometry_from_assessment( - assessment_data - ) - except Exception as e: - logger.warning( - f"Failed to extract geometry data for task {task.task_id}: {str(e)}" - ) - # Continue with assessment even if geometry extraction fails - - # Check for confidence threshold alerts - confidence_alerts = [] - self._check_confidence_alerts_for_task( - task, assessment_data, confidence_alerts - ) + task_counter[0] += 1 - processing_time = time.time() - start_time - if task_failed: - return AssessmentResult( - task_id=task.task_id, - success=False, - assessment_data=assessment_data, - confidence_alerts=confidence_alerts, - error_message=self._convert_error_list_to_string(error_messages), - processing_time=processing_time, - ) - else: - return AssessmentResult( - task_id=task.task_id, - success=True, - assessment_data=assessment_data, - confidence_alerts=confidence_alerts, - processing_time=processing_time, - metering=metering, - ) - - except Exception as e: - processing_time = time.time() - start_time - logger.error(f"Error processing assessment task {task.task_id}: {str(e)}") - - return AssessmentResult( - task_id=task.task_id, - success=False, - assessment_data={}, - confidence_alerts=[], - error_message=str(e), - processing_time=processing_time, - ) - - def 
_check_confidence_alerts_for_task( - self, - task: AssessmentTask, - assessment_data: Dict[str, Any], - alerts_list: List[Dict[str, Any]], - ) -> None: - """ - Check assessment data for confidence threshold violations for a specific task. - - Args: - task: The assessment task - assessment_data: Dictionary containing assessment data - alerts_list: List to append alerts to (modified in place) - """ - if task.task_type == "simple_batch": - for attr_name in task.attributes: - if attr_name in assessment_data and isinstance( - assessment_data[attr_name], dict - ): - confidence = _safe_float_conversion( - assessment_data[attr_name].get("confidence", 0.0), 0.0 - ) - threshold = task.confidence_thresholds.get(attr_name, 0.9) - if confidence < threshold: - alerts_list.append( - { - "attribute_name": attr_name, - "confidence": confidence, - "confidence_threshold": threshold, - } - ) + # Pre-allocate placeholder in dict (will be replaced by assessment obj) + parent_dict[prop_name] = None - elif task.task_type == "group": - attr_name = task.attributes[0] # Group tasks have one attribute - if attr_name in assessment_data and isinstance( - assessment_data[attr_name], dict - ): - for sub_attr_name, sub_assessment in assessment_data[attr_name].items(): - if ( - isinstance(sub_assessment, dict) - and "confidence" in sub_assessment - ): - confidence = _safe_float_conversion( - sub_assessment.get("confidence", 0.0), 0.0 - ) - threshold = task.confidence_thresholds.get(sub_attr_name, 0.9) - if confidence < threshold: - alerts_list.append( - { - "attribute_name": f"{attr_name}.{sub_attr_name}", - "confidence": confidence, - "confidence_threshold": threshold, - } - ) + # Start recursive traversal from root + _traverse(properties, extraction_results, (), assessment_structure) - elif task.task_type == "list_item": - attr_name = task.attributes[0] # List item tasks have one attribute - item_index = task.list_item_index if task.list_item_index is not None else 0 + logger.info(f"Created {len(tasks)} assessment tasks (one per leaf field)") - for item_attr_name, item_assessment in assessment_data.items(): - if ( - isinstance(item_assessment, dict) - and "confidence" in item_assessment - ): - confidence = _safe_float_conversion( - item_assessment.get("confidence", 0.0), 0.0 - ) - threshold = task.confidence_thresholds.get(item_attr_name, 0.9) - if confidence < threshold: - alerts_list.append( - { - "attribute_name": f"{attr_name}[{item_index}].{item_attr_name}", - "confidence": confidence, - "confidence_threshold": threshold, - } - ) + return tasks, assessment_structure def _get_cache_key( self, document_id: str, workflow_execution_arn: str, section_id: str @@ -1132,8 +591,8 @@ def _is_throttling_exception(self, exception: Exception) -> bool: Returns: True if exception indicates throttling, False otherwise """ - if hasattr(exception, "response") and "Error" in exception.response: # type: ignore[attr-defined] - error_code = exception.response["Error"]["Code"] # type: ignore[attr-defined] + if hasattr(exception, "response") and "Error" in exception.response: # pyright: ignore[reportAttributeAccessIssue] + error_code = exception.response["Error"]["Code"] # pyright: ignore[reportAttributeAccessIssue] return error_code in self.throttling_exceptions # Check exception class name and message for throttling indicators @@ -1149,34 +608,33 @@ def _aggregate_assessment_results( self, tasks: List[AssessmentTask], results: List[AssessmentResult], - extraction_results: Dict[str, Any], + assessment_structure: Dict[str, Any], ) -> 
Tuple[Dict[str, Any], List[Dict[str, Any]], Dict[str, Any]]: """ - Aggregate individual task results into the final assessment structure. + Aggregate individual task results into assessment structure using direct parent insertion. Args: tasks: List of assessment tasks results: List of assessment results - extraction_results: Original extraction results + assessment_structure: Pre-built assessment structure from _create_assessment_tasks Returns: - Tuple of (enhanced_assessment_data, confidence_alerts, aggregated_metering) + Tuple of (assessment_structure, confidence_alerts, aggregated_metering) """ - enhanced_assessment_data = {} all_confidence_alerts = [] aggregated_metering = {} # Create a mapping from task_id to result result_map = {result.task_id: result for result in results} - # Process results by task type + # Process each task result - direct O(1) insertion using parent reference for task in tasks: result = result_map.get(task.task_id) if not result or not result.success: logger.warning(f"Task {task.task_id} failed or missing result") continue - # Aggregate metering data using the same pattern as classification service + # Aggregate metering data if result.metering: aggregated_metering = utils.merge_metering_data( aggregated_metering, result.metering @@ -1185,85 +643,48 @@ def _aggregate_assessment_results( # Add confidence alerts all_confidence_alerts.extend(result.confidence_alerts) - # Process assessment data based on task type - if task.task_type == "simple_batch": - for attr_name in task.attributes: - if attr_name in result.assessment_data: - # Add confidence threshold to the assessment - assessment_value = result.assessment_data[attr_name] - if isinstance(assessment_value, dict): - assessment = assessment_value.copy() - threshold = task.confidence_thresholds.get(attr_name, 0.9) - assessment["confidence_threshold"] = threshold - enhanced_assessment_data[attr_name] = assessment - else: - logger.warning( - f"Unexpected assessment data type for {attr_name}: {type(assessment_value)}" - ) + # Get assessment data from result - should be a single assessment object + # The Strands agent returns the assessment in result.assessment_data + assessment_obj = result.assessment_data - elif task.task_type == "group": - attr_name = task.attributes[0] - if attr_name in result.assessment_data: - assessment_value = result.assessment_data[attr_name] - if isinstance(assessment_value, dict): - group_assessment = {} - for sub_attr_name, sub_assessment in assessment_value.items(): - if isinstance(sub_assessment, dict): - enhanced_sub_assessment = sub_assessment.copy() - threshold = task.confidence_thresholds.get( - sub_attr_name, 0.9 - ) - enhanced_sub_assessment["confidence_threshold"] = ( - threshold - ) - group_assessment[sub_attr_name] = ( - enhanced_sub_assessment - ) - else: - logger.warning( - f"Unexpected sub-assessment data type for {attr_name}.{sub_attr_name}: {type(sub_assessment)}" - ) - group_assessment[sub_attr_name] = sub_assessment - enhanced_assessment_data[attr_name] = group_assessment - else: - logger.warning( - f"Unexpected group assessment data type for {attr_name}: {type(assessment_value)}" - ) - - elif task.task_type == "list_item": - attr_name = task.attributes[0] - item_index = ( - task.list_item_index if task.list_item_index is not None else 0 + if not isinstance(assessment_obj, dict): + logger.warning( + f"Task {task.task_id}: expected dict assessment, got {type(assessment_obj)}" ) + continue - # Initialize list structure if not exists - if attr_name not in 
enhanced_assessment_data: - enhanced_assessment_data[attr_name] = [] - - # Ensure the list is long enough for this item - while len(enhanced_assessment_data[attr_name]) <= item_index: - enhanced_assessment_data[attr_name].append({}) - - # Add assessments for this list item - item_assessment = {} - for ( - item_attr_name, - item_assessment_data, - ) in result.assessment_data.items(): - if isinstance(item_assessment_data, dict): - enhanced_item_assessment = item_assessment_data.copy() - threshold = task.confidence_thresholds.get(item_attr_name, 0.9) - enhanced_item_assessment["confidence_threshold"] = threshold - item_assessment[item_attr_name] = enhanced_item_assessment + # Add confidence_threshold to the assessment object + assessment_obj["confidence_threshold"] = task.confidence_threshold + + # Direct insertion using parent reference - O(1) operation! + parent = task.parent_assessment_dict + field_name = task.field_name + + if isinstance(parent, dict): + # Regular field - insert into parent dict + parent[field_name] = assessment_obj + elif isinstance(parent, list): + # Array item - get index from field_path + # field_path is like ("items", 0, "price") - second-to-last is the index + if len(task.field_path) >= 2 and isinstance(task.field_path[-2], int): + idx = task.field_path[-2] + # Replace the None placeholder we created during structure building + if idx < len(parent): + parent[idx] = assessment_obj else: logger.warning( - f"Unexpected list item assessment data type for {attr_name}[{item_index}].{item_attr_name}: {type(item_assessment_data)}" + f"Task {task.task_id}: index {idx} out of range for list of length {len(parent)}" ) - item_assessment[item_attr_name] = item_assessment_data - - enhanced_assessment_data[attr_name][item_index] = item_assessment + else: + logger.warning( + f"Task {task.task_id}: cannot determine array index from path {task.field_path}" + ) + else: + logger.warning( + f"Task {task.task_id}: unexpected parent type {type(parent)}" + ) - return enhanced_assessment_data, all_confidence_alerts, aggregated_metering + return assessment_structure, all_confidence_alerts, aggregated_metering def _get_text_confidence_data(self, page) -> str: """ @@ -1579,8 +1000,6 @@ def process_document_section(self, document: Document, section_id: str) -> Docum # Get assessment configuration (type-safe, Pydantic handles conversions) model_id = self.config.assessment.model temperature = self.config.assessment.temperature - top_k = self.config.assessment.top_k - top_p = self.config.assessment.top_p max_tokens = self.config.assessment.max_tokens system_prompt = self.config.assessment.system_prompt @@ -1597,17 +1016,8 @@ def process_document_section(self, document: Document, section_id: str) -> Docum self.config.assessment.default_confidence_threshold ) - # Build the cached base prompt (without attribute descriptions - those are task-specific) - base_content = self._build_cached_prompt_base( - document_text, - class_label, - "", # Empty attribute descriptions - will be replaced per task - ocr_text_confidence, - page_images, - ) - - # Create assessment tasks - tasks = self._create_assessment_tasks( + # Create assessment tasks and pre-built assessment structure + tasks, assessment_structure = self._create_assessment_tasks( extraction_results, properties, default_confidence_threshold ) @@ -1622,12 +1032,6 @@ def process_document_section(self, document: Document, section_id: str) -> Docum all_task_results = list(cached_task_results.values()) combined_metering = {} - # Use thread-safe error 
collection (similar to classification service) - import threading - - errors_lock = threading.Lock() - failed_task_exceptions = {} # Store original exceptions for failed tasks - # Determine which tasks need processing tasks_to_process = [] for task in tasks: @@ -1646,106 +1050,52 @@ def process_document_section(self, document: Document, section_id: str) -> Docum f"Found {len(cached_task_results)} cached assessment task results, processing {len(tasks_to_process)} remaining tasks" ) - # Time the model invocations + # Apply grid overlay to page images for assessment + grid_page_images = [] + for page_img in page_images: + grid_img = grid_overlay.add_grid_overlay(page_img) + grid_page_images.append(grid_img) + + # Execute tasks using Strands-based parallel executor + logger.info( + f"Processing {len(tasks_to_process)} assessment tasks with Strands executor (max_concurrent={self.max_workers})" + ) + request_start_time = time.time() - # Process tasks (parallel or sequential based on configuration) - if self.enable_parallel and len(tasks_to_process) > 1: - logger.info( - f"Processing {len(tasks_to_process)} assessment tasks in parallel with {self.max_workers} workers" + # Call Strands executor - handles both parallel and sequential based on max_concurrent + task_results, task_metering, processing_time = ( + execute_assessment_tasks_parallel( + tasks=tasks_to_process, + extraction_results=extraction_results, + page_images=grid_page_images, + sorted_page_ids=sorted_page_ids, + model_id=model_id, + system_prompt=system_prompt, + temperature=temperature, + max_tokens=max_tokens, + max_concurrent=self.max_workers, ) + ) - with ThreadPoolExecutor(max_workers=self.max_workers) as executor: - # Submit all uncached tasks - future_to_task = { - executor.submit( - self._process_assessment_task, - task, - base_content, - properties, - model_id, - system_prompt, - temperature, - top_k, - top_p, - max_tokens, - ): task - for task in tasks_to_process - } + # Merge results and metering + all_task_results.extend(task_results) + combined_metering = utils.merge_metering_data( + combined_metering, task_metering + ) - # Collect results with enhanced error handling - for future in as_completed(future_to_task): - task = future_to_task[future] - try: - result = future.result() - all_task_results.append(result) - - # Merge metering data - if result.metering: - combined_metering = utils.merge_metering_data( - combined_metering, result.metering - ) - except Exception as e: - # Capture exception details for later use - error_msg = f"Error processing assessment task {task.task_id}: {str(e)}" - logger.error(error_msg) - with errors_lock: - document.errors.append(error_msg) - # Store the original exception for later analysis - failed_task_exceptions[task.task_id] = e - - # Create failed result - failed_result = AssessmentResult( - task_id=task.task_id, - success=False, - assessment_data={}, - confidence_alerts=[], - error_message=str(e), - ) - all_task_results.append(failed_result) - else: - logger.info( - f"Processing {len(tasks_to_process)} assessment tasks sequentially" - ) - request_start_time = time.time() + logger.info( + f"Strands executor completed {len(task_results)} tasks in {processing_time:.2f}s" + ) - for task in tasks_to_process: - try: - result = self._process_assessment_task( - task, - base_content, - properties, - model_id, - system_prompt, - temperature, - top_k, - top_p, - max_tokens, - ) - all_task_results.append(result) - - # Merge metering data - if result.metering: - combined_metering = 
utils.merge_metering_data( - combined_metering, result.metering - ) - except Exception as e: - # Capture exception details for later use - error_msg = f"Error processing assessment task {task.task_id}: {str(e)}" - logger.error(error_msg) - document.errors.append(error_msg) - # Store the original exception for later analysis - failed_task_exceptions[task.task_id] = e - - # Create failed result - failed_result = AssessmentResult( - task_id=task.task_id, - success=False, - assessment_data={}, - confidence_alerts=[], - error_message=str(e), - ) - all_task_results.append(failed_result) + # Track failed tasks for metadata + failed_task_exceptions = {} + for result in task_results: + if not result.success and result.error_message: + # Create a simple exception object for compatibility + failed_task_exceptions[result.task_id] = Exception( + result.error_message + ) # Store failed task exceptions in document metadata for caller to access if failed_task_exceptions: @@ -1824,12 +1174,12 @@ def process_document_section(self, document: Document, section_id: str) -> Docum f"Time taken for granular assessment: {total_duration:.2f} seconds" ) - # Aggregate results + # Aggregate results into pre-built assessment structure ( enhanced_assessment_data, confidence_threshold_alerts, aggregated_metering, - ) = self._aggregate_assessment_results(tasks, results, extraction_results) + ) = self._aggregate_assessment_results(tasks, results, assessment_structure) # Calculate success metrics successful_tasks = [r for r in results if r.success] diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py new file mode 100644 index 00000000..ba91fc5b --- /dev/null +++ b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py @@ -0,0 +1,244 @@ +""" +Parallel executor for Strands-based assessment tasks. + +This module provides asyncio-based parallel execution of assessment tasks +with concurrency control via semaphores. +""" + +import asyncio +import os +import time +from typing import Any, cast + +from aws_lambda_powertools import Logger + +from idp_common.assessment.strands_service import ( + AssessmentResult, + AssessmentTask, + assess_attribute_with_strands, +) +from idp_common.utils import merge_metering_data + +logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) + + +async def execute_tasks_async( + tasks: list[AssessmentTask], + extraction_results: dict[str, Any], + page_images: list[bytes], + sorted_page_ids: list[str], + model_id: str, + system_prompt: str, + temperature: float, + max_tokens: int, + max_concurrent: int = 5, + max_retries: int = 7, + connect_timeout: float = 10.0, + read_timeout: float = 300.0, +) -> tuple[list[AssessmentResult], dict[str, Any]]: + """ + Execute assessment tasks in parallel using asyncio. 
+ + Args: + tasks: List of assessment tasks to execute + base_content: Base prompt content + extraction_results: Full extraction results + page_images: List of page images (with grid overlay) + sorted_page_ids: List of page IDs + model_id: Model to use + system_prompt: System prompt + temperature: Model temperature + max_tokens: Max tokens + max_concurrent: Maximum concurrent tasks (default 5) + max_retries: Maximum retry attempts + connect_timeout: Connection timeout in seconds + read_timeout: Read timeout in seconds + + Returns: + Tuple of (results, combined_metering) + """ + logger.info( + f"Starting parallel execution of {len(tasks)} assessment tasks with max_concurrent={max_concurrent}" + ) + + # Create semaphore to limit concurrency + semaphore = asyncio.Semaphore(max_concurrent) + + async def execute_with_semaphore(task: AssessmentTask) -> AssessmentResult: + """Execute task with semaphore to limit concurrency.""" + async with semaphore: + logger.debug( + f"Executing task {task.task_id} (type: {task.task_type})", + extra={"task_id": task.task_id, "task_type": task.task_type}, + ) + return await assess_attribute_with_strands( + task=task, + extraction_results=extraction_results, + page_images=page_images, + sorted_page_ids=sorted_page_ids, + model_id=model_id, + system_prompt=system_prompt, + temperature=temperature, + max_tokens=max_tokens, + max_retries=max_retries, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + + # Execute all tasks concurrently (with semaphore limit) + # Use return_exceptions=True to capture failures without stopping others + results = await asyncio.gather( + *[execute_with_semaphore(task) for task in tasks], + return_exceptions=True, + ) + + # Process results and handle exceptions + processed_results: list[AssessmentResult] = [] + combined_metering: dict[str, Any] = {} + + for i, result in enumerate(results): + if isinstance(result, Exception): + # Convert exception to failed AssessmentResult + logger.error( + f"Task {tasks[i].task_id} failed with exception", + extra={ + "task_id": tasks[i].task_id, + "exception": str(result), + "exception_type": type(result).__name__, + }, + ) + processed_results.append( + AssessmentResult( + task_id=tasks[i].task_id, + success=False, + assessment_data={}, + confidence_alerts=[], + error_message=str(result), + processing_time=0.0, + ) + ) + else: + # result is AssessmentResult here (not Exception) + assessment_result = cast(AssessmentResult, result) + processed_results.append(assessment_result) + # Merge metering data + if assessment_result.metering: + combined_metering = merge_metering_data( + combined_metering, assessment_result.metering + ) + + # Log summary + successful_tasks = sum(1 for r in processed_results if r.success) + failed_tasks = len(processed_results) - successful_tasks + + logger.info( + f"Completed {len(processed_results)} tasks: {successful_tasks} successful, {failed_tasks} failed", + extra={ + "total_tasks": len(processed_results), + "successful": successful_tasks, + "failed": failed_tasks, + }, + ) + + return processed_results, combined_metering + + +def execute_assessment_tasks_parallel( + tasks: list[AssessmentTask], + extraction_results: dict[str, Any], + page_images: list[bytes], + sorted_page_ids: list[str], + model_id: str, + system_prompt: str, + temperature: float, + max_tokens: int, + max_concurrent: int = 5, + max_retries: int = 7, + connect_timeout: float = 10.0, + read_timeout: float = 300.0, +) -> tuple[list[AssessmentResult], dict[str, Any], float]: + """ + Execute 
assessment tasks in parallel (synchronous wrapper). + + This is the main entry point called from process_document_section. + It wraps the async executor and provides synchronous interface. + + Args: + tasks: List of assessment tasks + base_content: Base prompt content + extraction_results: Full extraction results + page_images: List of page images (with grid overlay already applied) + sorted_page_ids: List of page IDs in sorted order + model_id: Model ID + system_prompt: System prompt + temperature: Temperature + max_tokens: Max tokens + max_concurrent: Max concurrent tasks (default 5) + max_retries: Maximum retry attempts + connect_timeout: Connection timeout in seconds + read_timeout: Read timeout in seconds + + Returns: + Tuple of (results, metering, duration) + """ + logger.info( + f"Starting parallel assessment execution for {len(tasks)} tasks", + extra={"num_tasks": len(tasks), "max_concurrent": max_concurrent}, + ) + + start_time = time.time() + + # Run async executor + # Use asyncio.run() for clean event loop management + try: + results, metering = asyncio.run( + execute_tasks_async( + tasks=tasks, + extraction_results=extraction_results, + page_images=page_images, + sorted_page_ids=sorted_page_ids, + model_id=model_id, + system_prompt=system_prompt, + temperature=temperature, + max_tokens=max_tokens, + max_concurrent=max_concurrent, + max_retries=max_retries, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + ) + except RuntimeError as e: + # Handle case where event loop already exists (shouldn't happen in Lambda) + if "There is no current event loop" in str(e) or "asyncio.run()" in str(e): + logger.warning( + "Event loop already exists, using get_event_loop", + extra={"error": str(e)}, + ) + loop = asyncio.get_event_loop() + results, metering = loop.run_until_complete( + execute_tasks_async( + tasks=tasks, + extraction_results=extraction_results, + page_images=page_images, + sorted_page_ids=sorted_page_ids, + model_id=model_id, + system_prompt=system_prompt, + temperature=temperature, + max_tokens=max_tokens, + max_concurrent=max_concurrent, + max_retries=max_retries, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + ) + else: + raise + + duration = time.time() - start_time + + logger.info( + f"Parallel assessment execution completed in {duration:.2f}s", + extra={"duration_seconds": duration, "num_tasks": len(tasks)}, + ) + + return results, metering, duration diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_models.py b/lib/idp_common_pkg/idp_common/assessment/strands_models.py new file mode 100644 index 00000000..80cc261e --- /dev/null +++ b/lib/idp_common_pkg/idp_common/assessment/strands_models.py @@ -0,0 +1,76 @@ +""" +Pydantic models for Strands-based assessment structured output. + +These models define the structured data format that Strands agents return +when assessing document extraction confidence with bounding boxes. 
+""" + +from typing import Any + +from pydantic import BaseModel, Field + + +class BoundingBox(BaseModel): + """Bounding box coordinates in normalized 0-1000 scale.""" + + x1: int = Field(..., ge=0, le=1000, description="Top-left X coordinate") + y1: int = Field(..., ge=0, le=1000, description="Top-left Y coordinate") + x2: int = Field(..., ge=0, le=1000, description="Bottom-right X coordinate") + y2: int = Field(..., ge=0, le=1000, description="Bottom-right Y coordinate") + page: int = Field(..., ge=1, description="Page number (1-indexed)") + + def to_geometry(self) -> dict[str, Any]: + """ + Convert to IDP geometry format. + + Returns: + Dictionary with BoundingBox and Page in IDP format + """ + return { + "BoundingBox": { + "Width": (self.x2 - self.x1) / 1000.0, + "Height": (self.y2 - self.y1) / 1000.0, + "Left": self.x1 / 1000.0, + "Top": self.y1 / 1000.0, + }, + "Page": self.page, + } + + +class ConfidenceAssessment(BaseModel): + """Confidence assessment for an attribute value.""" + + value: Any = Field(..., description="The extracted value") + confidence: float = Field(..., ge=0.0, le=1.0, description="Confidence score 0-1") + reasoning: str = Field(..., description="Explanation for the confidence score") + threshold: float = Field( + ..., ge=0.0, le=1.0, description="Required confidence threshold" + ) + bounding_box: BoundingBox | None = Field( + None, description="Location of value in document" + ) + + @property + def meets_threshold(self) -> bool: + """Computed field: whether confidence meets threshold.""" + return self.confidence >= self.threshold + + +class AssessmentOutput(BaseModel): + """ + Structured output for confidence assessment of a single field. + + Each task assesses exactly ONE field (e.g., "name" or "address.street"). + The assessment is directly the ConfidenceAssessment for that field. + """ + + field_name: str = Field( + ..., + description="The name/path of the field being assessed (e.g., 'name' or 'address.street')", + ) + assessment: ConfidenceAssessment = Field( + ..., description="Confidence assessment for this specific field" + ) + alerts: list[str] = Field( + default_factory=list, description="Any confidence threshold alerts or issues" + ) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py new file mode 100644 index 00000000..cb665e98 --- /dev/null +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -0,0 +1,391 @@ +""" +Core assessment service using Strands agents with interactive tools. + +This module provides the main assessment functions that use Strands agents +to assess extraction confidence with bounding boxes and interactive image viewing. 
+""" + +import json +import os +import time +from typing import Any + +from aws_lambda_powertools import Logger +from botocore.config import Config +from pydantic import BaseModel +from strands import Agent, tool +from strands.agent.conversation_manager import SummarizingConversationManager +from strands.models.bedrock import BedrockModel +from strands.types.content import ContentBlock, Message + +from idp_common.assessment.strands_models import AssessmentOutput +from idp_common.assessment.strands_tools import create_strands_tools + +logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) + + +# Pydantic versions of AssessmentTask/Result for Strands compatibility +# Note: granular_service has dataclass versions - these are separate for Strands +class AssessmentTask(BaseModel): + """Assessment task definition.""" + + task_id: str + task_type: str + attributes: list[str] + task_schema: dict[str, Any] + confidence_thresholds: dict[str, float] + + +class AssessmentResult(BaseModel): + """Assessment result.""" + + task_id: str + success: bool + assessment_data: dict[str, Any] + confidence_alerts: list[dict[str, Any]] + error_message: str | None = None + processing_time: float = 0.0 + metering: dict[str, Any] | None = None + + +def create_submit_assessment_tool(): + """ + Create a tool for submitting assessment results. + + Returns: + A Strands tool function for submitting assessments + """ + + @tool + def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: + """ + Submit your final confidence assessment. + + Use this tool when you have: + 1. Located the values in the document images + 2. Determined precise bounding box coordinates using ruler markings + 3. Assessed the confidence based on clarity and accuracy + + Args: + assessment: Dictionary with: + - assessments: dict mapping attribute names to ConfidenceAssessment + - alerts: list of any threshold alerts (optional) + + Returns: + Success confirmation message or validation error details + """ + # Validate assessment structure and return helpful errors + validated_assessment = AssessmentOutput(**assessment) # pyright: ignore[reportCallIssue] + + # Store in agent state + agent.state.set("assessment_output", validated_assessment.model_dump()) + + logger.info( + "Assessment submitted successfully", + extra={"assessment": validated_assessment.model_dump()}, + ) + + return "Assessment submitted successfully. You can now finish the task." + + return submit_assessment + + +async def assess_attribute_with_strands( + task: AssessmentTask, + extraction_results: dict[str, Any], + page_images: list[bytes], + sorted_page_ids: list[str], + model_id: str, + system_prompt: str, + temperature: float, + max_tokens: int, + max_retries: int = 7, + connect_timeout: float = 10.0, + read_timeout: float = 300.0, +) -> AssessmentResult: + """ + Assess attributes using Strands agent with interactive tools. 
+ + Args: + task: Assessment task to process + base_content: Base prompt content (includes images) + extraction_results: Full extraction results + page_images: List of page images (with grid overlay already applied) + sorted_page_ids: List of page IDs in sorted order + model_id: Bedrock model ID + system_prompt: System prompt for assessment + temperature: Model temperature + max_tokens: Max tokens for response + max_retries: Maximum retry attempts for API calls + connect_timeout: Connection timeout in seconds + read_timeout: Read timeout in seconds + + Returns: + AssessmentResult with structured assessment data + """ + start_time = time.time() + + try: + # 1. Create tools (image viewer + todo list + submit assessment) + base_tools = create_strands_tools(page_images, sorted_page_ids) + submit_tool = create_submit_assessment_tool() + tools = base_tools + [submit_tool] + + # 2. Build task-specific prompt + task_prompt = _build_task_prompt(task, extraction_results, len(page_images)) + + # 3. Create Bedrock model config (following agentic_idp.py pattern) + boto_config = Config( + retries={ + "max_attempts": max_retries, + "mode": "adaptive", + }, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + + model_config = { + "model_id": model_id, + "temperature": temperature, + "max_tokens": max_tokens, + "boto_client_config": boto_config, + } + + # 4. Initialize Strands agent + agent = Agent( + model=BedrockModel(**model_config), + tools=tools, + system_prompt=system_prompt, + state={ + "task": task.model_dump(), + "extraction_results": extraction_results, + "assessment_output": None, + }, + conversation_manager=SummarizingConversationManager( + summary_ratio=0.8, preserve_recent_messages=2 + ), + ) + + # 5. Create user message with task prompt + user_message = Message(role="user", content=[ContentBlock(text=task_prompt)]) + + # 6. Run agent + logger.info( + "Starting Strands assessment", + extra={ + "task_id": task.task_id, + "task_type": task.task_type, + "attributes": task.attributes, + }, + ) + + response = await agent.invoke_async([user_message]) + + logger.debug("Agent response received", extra={"task_id": task.task_id}) + + # 7. Extract assessment from agent state + assessment_dict = agent.state.get("assessment_output") + if not assessment_dict: + raise ValueError( + "Agent did not produce assessment output. Agent may not have called submit_assessment tool." + ) + + # Validate to Pydantic model + assessment_output = AssessmentOutput(**assessment_dict) + + # Validate that agent assessed exactly the expected field + expected_field = task.attributes[0] # Task assesses exactly one field + if assessment_output.field_name != expected_field: + raise ValueError( + f"Agent assessed wrong field: expected '{expected_field}', " + f"got '{assessment_output.field_name}'" + ) + + # 8. Extract metering from response + metering = {} + if response.metrics and response.metrics.accumulated_usage: + token_usage = { + "inputTokens": response.metrics.accumulated_usage.get("inputTokens", 0), + "outputTokens": response.metrics.accumulated_usage.get( + "outputTokens", 0 + ), + "totalTokens": response.metrics.accumulated_usage.get("totalTokens", 0), + "cacheReadInputTokens": response.metrics.accumulated_usage.get( + "cacheReadInputTokens", 0 + ), + "cacheWriteInputTokens": response.metrics.accumulated_usage.get( + "cacheWriteInputTokens", 0 + ), + } + metering[f"assessment/bedrock/{model_id}"] = token_usage + + # 9. 
Convert to AssessmentResult format + result = _convert_to_assessment_result( + task, + assessment_output, + metering, + time.time() - start_time, + ) + + logger.info( + "Assessment completed successfully", + extra={ + "task_id": task.task_id, + "processing_time": result.processing_time, + "success": result.success, + }, + ) + + return result + + except Exception as e: + # Return failed result + logger.error( + "Assessment failed", + extra={ + "task_id": task.task_id, + "error": str(e), + "processing_time": time.time() - start_time, + }, + ) + + return AssessmentResult( + task_id=task.task_id, + success=False, + assessment_data={}, + confidence_alerts=[], + error_message=str(e), + processing_time=time.time() - start_time, + ) + + +def _build_task_prompt( + task: AssessmentTask, + extraction_results: dict[str, Any], + num_images: int, +) -> str: + """ + Build prompt for assessing a single field. + + Includes: + - Clear field path (e.g., "address.street" or "items[2].price") + - Full extraction results for context + - Schema and threshold for the specific field + - Instructions for using images and tools + + Args: + task: Assessment task for one specific field + extraction_results: Complete extraction results (arbitrarily nested) + num_images: Number of available page images + + Returns: + Formatted prompt string + """ + # Get the single field being assessed + field_path = task.attributes[ + 0 + ] # e.g., "name" or "address.street" or "items[0].price" + threshold = list(task.confidence_thresholds.values())[0] + + prompt = f"""# Confidence Assessment Task + +You are assessing the confidence of a SINGLE extracted field from a document. + +## Field to Assess +**Field Path**: `{field_path}` +**Confidence Threshold**: {threshold} + +## Complete Extraction Results +(Full document context - locate the value for `{field_path}`) +{json.dumps(extraction_results, indent=2)} + +## Field Schema +{json.dumps(task.task_schema, indent=2)} + +## Your Task +Assess ONLY the field `{field_path}`. Do not assess any other fields. + +## Available Document Images + +You have access to {num_images} document page images (indices 0-{num_images - 1}). +Each image has ruler markings along the edges showing the 0-1000 coordinate scale. + +Use the `view_image` tool to: +1. View images to locate the extracted values +2. Draw bounding boxes to verify coordinates +3. Check if values are clearly visible and readable + +## Assessment Process + +1. **Plan**: Use `create_todo_list` to organize your assessment steps +2. **Locate**: Use `view_image` to find each value in the document +3. **Coordinate**: Determine precise bounding box coordinates using ruler markings (0-1000 scale) +4. **Assess**: Evaluate confidence based on: + - Text clarity and OCR quality + - Value correctness compared to what you see in the image + - Bounding box accuracy +5. **Submit**: Use `submit_assessment` tool with your final assessment + +## Bounding Box Format + +Bounding boxes use normalized 0-1000 coordinates: +- x1, y1: Top-left corner +- x2, y2: Bottom-right corner +- page: Page number (1-indexed) + +Example: {{"x1": 150, "y1": 220, "x2": 380, "y2": 245, "page": 1}} + +## Output Schema + +Your assessment must match the {task.task_type} schema. +Use the `submit_assessment` tool when ready with a complete assessment dict. + +**Important**: You MUST call `submit_assessment` to complete this task. 
+""" + return prompt + + +def _convert_to_assessment_result( + task: AssessmentTask, + output: AssessmentOutput, + metering: dict[str, Any], + processing_time: float, +) -> AssessmentResult: + """Convert Strands AssessmentOutput to AssessmentResult.""" + + # Single field assessment + field_name = output.field_name + assessment = output.assessment + + # Build assessment data with confidence score + assessment_data = { + field_name: { + "confidence": assessment.confidence, + "value": assessment.value, + "reasoning": assessment.reasoning, + } + } + + # Add geometry if bounding box provided + if assessment.bounding_box: + assessment_data[field_name]["Geometry"] = assessment.bounding_box.to_geometry() + + # Check for confidence threshold violations + confidence_alerts = [] + if not assessment.meets_threshold: + confidence_alerts.append( + { + "attribute_name": field_name, + "confidence": assessment.confidence, + "confidence_threshold": assessment.threshold, + } + ) + + return AssessmentResult( + task_id=task.task_id, + success=True, + assessment_data=assessment_data, + confidence_alerts=confidence_alerts, + processing_time=processing_time, + metering=metering, + ) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py new file mode 100644 index 00000000..6ab9fd33 --- /dev/null +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -0,0 +1,161 @@ +""" +Strands tools for confidence assessment with image viewing and grid overlay. + +This module provides tools for Strands agents to view document pages and +mark bounding boxes during confidence assessment tasks. +""" + +import os +from typing import Any + +from aws_lambda_powertools import Logger +from pydantic import BaseModel, Field +from strands import Agent, tool + +from idp_common.assessment.strands_models import BoundingBox +from idp_common.utils.grid_overlay import draw_bounding_boxes +from idp_common.utils.strands_agent_tools.todo_list import ( + create_todo_list, + update_todo, + view_todo_list, +) + +logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) + + +class ViewImageInput(BaseModel): + """Input model for view_image tool.""" + + image_index: int = Field( + ..., ge=0, description="Index of the page image to view (0-based)" + ) + bounding_box: BoundingBox | None = Field( + None, description="Optional bounding box to highlight on the image" + ) + label: str | None = Field(None, description="Optional label for the bounding box") + + +def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) -> Any: + """ + Create a view_image tool that has access to page images. + + Args: + page_images: List of page image bytes (with grid overlay already applied) + sorted_page_ids: List of page IDs in sorted order + + Returns: + A Strands tool function for viewing images + """ + + @tool + def view_image(input_data: dict[str, Any], agent: Agent) -> str: + """ + View a specific page image, optionally highlighting a bounding box area. + + Use this tool to examine document pages when assessing confidence. + You can specify a bounding box to highlight a specific region. 
+ + Args: + input_data: Dictionary with: + - image_index (int): Index of page to view (0-based) + - bounding_box (dict, optional): Bounding box with x1, y1, x2, y2, page + - label (str, optional): Label for the bounding box + + Returns: + Success message with image details + + Example: + view_image({ + "image_index": 0, + "bounding_box": {"x1": 100, "y1": 200, "x2": 300, "y2": 250, "page": 1}, + "label": "Account Number" + }, agent) + """ + # Validate input - let ValidationError propagate + view_input = ViewImageInput(**input_data) + + # Validate image index exists + if view_input.image_index >= len(page_images): + raise ValueError( + f"Invalid image_index {view_input.image_index}. " + f"Valid range: 0-{len(page_images) - 1}" + ) + + # Get the base image (already has grid overlay) + img_bytes = page_images[view_input.image_index] + page_id = sorted_page_ids[view_input.image_index] + + # If bounding box is specified, draw it on the image + if view_input.bounding_box: + # Convert BoundingBox to dict format for draw_bounding_boxes + bbox_dict = { + "bbox": [ + view_input.bounding_box.x1, + view_input.bounding_box.y1, + view_input.bounding_box.x2, + view_input.bounding_box.y2, + ], + "label": view_input.label or "Highlighted Region", + "color": "red", + } + + # Draw the bounding box on the image (which already has ruler) + # Let drawing errors propagate - if we can't draw, something is wrong + img_bytes = draw_bounding_boxes( + img_bytes, + [bbox_dict], + has_ruler=True, + ruler_width=30, + ) + + logger.debug( + "Drew bounding box on image", + extra={ + "image_index": view_input.image_index, + "bbox": bbox_dict["bbox"], + }, + ) + + # Store the image in agent state using standardized key + image_key = f"page_{view_input.image_index}_{page_id}" + agent.state.set(image_key, img_bytes) + + logger.info( + "Stored image in agent state", + extra={ + "image_index": view_input.image_index, + "page_id": page_id, + "has_bbox": view_input.bounding_box is not None, + }, + ) + + bbox_info = "" + if view_input.bounding_box: + bbox_info = f"\nHighlighted region: [{view_input.bounding_box.x1}, {view_input.bounding_box.y1}, {view_input.bounding_box.x2}, {view_input.bounding_box.y2}] on page {view_input.bounding_box.page}" + if view_input.label: + bbox_info += f'\nLabel: "{view_input.label}"' + + return f"Showing page {view_input.image_index} (Page ID: {page_id}){bbox_info}\nUse the coordinate grid (0-1000 scale) to specify bounding boxes." + + return view_image + + +def create_strands_tools( + page_images: list[bytes], sorted_page_ids: list[str] +) -> list[Any]: + """ + Create all tools needed for Strands-based assessment. 
+ + Args: + page_images: List of page image bytes (with grid overlay already applied) + sorted_page_ids: List of page IDs in sorted order + + Returns: + List of Strands tool functions + """ + return [ + create_view_image_tool(page_images, sorted_page_ids), + create_todo_list, + update_todo, + view_todo_list, + ] diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 0d5f4bf8..e0fa885f 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -233,25 +233,6 @@ def validate_section_splitting(cls, v: Any) -> str: return v -class GranularAssessmentConfig(BaseModel): - """Granular assessment configuration""" - - enabled: bool = Field(default=False, description="Enable granular assessment") - list_batch_size: int = Field(default=1, gt=0) - simple_batch_size: int = Field(default=3, gt=0) - max_workers: int = Field(default=20, gt=0) - - @field_validator( - "list_batch_size", "simple_batch_size", "max_workers", mode="before" - ) - @classmethod - def parse_int(cls, v: Any) -> int: - """Parse int from string or number""" - if isinstance(v, str): - return int(v) if v else 0 - return int(v) - - class AssessmentConfig(BaseModel): """Document assessment configuration""" @@ -334,7 +315,10 @@ class AssessmentConfig(BaseModel): default_confidence_threshold: float = Field(default=0.8, ge=0.0, le=1.0) validation_enabled: bool = Field(default=False, description="Enable validation") image: ImageConfig = Field(default_factory=ImageConfig) - granular: GranularAssessmentConfig = Field(default_factory=GranularAssessmentConfig) + # Granular assessment settings (always enabled, no longer nested) + max_workers: int = Field( + default=20, gt=0, description="Max concurrent workers for parallel assessment" + ) @field_validator( "temperature", "top_p", "top_k", "default_confidence_threshold", mode="before" @@ -346,7 +330,7 @@ def parse_float(cls, v: Any) -> float: return float(v) if v else 0.0 return float(v) - @field_validator("max_tokens", mode="before") + @field_validator("max_tokens", "max_workers", mode="before") @classmethod def parse_int(cls, v: Any) -> int: """Parse int from string or number""" diff --git a/lib/idp_common_pkg/idp_common/extraction/models.py b/lib/idp_common_pkg/idp_common/extraction/models.py index a207adbb..2347c712 100644 --- a/lib/idp_common_pkg/idp_common/extraction/models.py +++ b/lib/idp_common_pkg/idp_common/extraction/models.py @@ -10,6 +10,8 @@ from dataclasses import dataclass from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field + @dataclass class ExtractedAttribute: @@ -41,3 +43,78 @@ class PageInfo: text_uri: Optional[str] = None image_uri: Optional[str] = None raw_text_uri: Optional[str] = None + + +class DocumentClassInfo(BaseModel): + """Document classification information""" + + type: str = Field(description="Document class/type") + + +class SplitDocumentInfo(BaseModel): + """Information about document splitting""" + + page_indices: List[int] = Field( + default_factory=list, description="Page indices in the split document" + ) + + +class ExtractionMetadata(BaseModel): + """Metadata about the extraction process""" + + parsing_succeeded: bool = Field( + default=True, description="Whether parsing succeeded" + ) + extraction_time_seconds: Optional[float] = Field( + default=None, description="Time taken for extraction" + ) + skipped_due_to_empty_attributes: Optional[bool] = Field( + default=None, + description="Whether extraction was 
skipped due to empty attributes", + ) + assessment_time_seconds: Optional[float] = Field( + default=None, description="Time taken for assessment" + ) + granular_assessment_used: Optional[bool] = Field( + default=None, description="Whether granular assessment was used" + ) + assessment_tasks_total: Optional[int] = Field( + default=None, description="Total number of assessment tasks" + ) + assessment_tasks_successful: Optional[int] = Field( + default=None, description="Number of successful assessment tasks" + ) + assessment_tasks_failed: Optional[int] = Field( + default=None, description="Number of failed assessment tasks" + ) + + +class ExtractionData(BaseModel): + """ + Complete extraction data structure stored in S3. + + This model represents the JSON structure written to S3 containing + extraction results, assessment information, and metadata. + """ + + document_class: DocumentClassInfo = Field( + description="Document classification information" + ) + split_document: SplitDocumentInfo = Field( + default_factory=SplitDocumentInfo, + description="Information about document splitting", + ) + inference_result: Dict[str, Any] = Field( + default_factory=dict, description="Extracted data from the document" + ) + explainability_info: Optional[List[Dict[str, Any]]] = Field( + default=None, description="Assessment/explainability information" + ) + metadata: ExtractionMetadata = Field( + default_factory=ExtractionMetadata, + description="Extraction and assessment metadata", + ) + + class Config: + # Allow extra fields for forward compatibility + extra = "allow" diff --git a/lib/idp_common_pkg/idp_common/image/__init__.py b/lib/idp_common_pkg/idp_common/image/__init__.py index da867890..65cbdd33 100644 --- a/lib/idp_common_pkg/idp_common/image/__init__.py +++ b/lib/idp_common_pkg/idp_common/image/__init__.py @@ -1,30 +1,33 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 -from PIL import Image, ImageFilter, ImageChops, ImageOps +from PIL import Image, ImageFilter, ImageChops, ImageOps, ImageDraw, ImageFont import io import logging -from typing import Tuple, Optional, Dict, Any, Union +from typing import Tuple, Optional, Dict, Any, Union, List from ..s3 import get_binary_content from ..utils import parse_s3_uri logger = logging.getLogger(__name__) -def resize_image(image_data: bytes, - target_width: Optional[int] = None, - target_height: Optional[int] = None, - allow_upscale: bool = False) -> bytes: + +def resize_image( + image_data: bytes, + target_width: Optional[int] = None, + target_height: Optional[int] = None, + allow_upscale: bool = False, +) -> bytes: """ Resize an image to fit within target dimensions while preserving aspect ratio. No padding, no distortion - pure proportional scaling. Preserves original format when possible. 
- + Args: image_data: Raw image bytes target_width: Target width in pixels (None or empty string = no resize) target_height: Target height in pixels (None or empty string = no resize) allow_upscale: Whether to allow making the image larger than original - + Returns: Resized image bytes in original format (or JPEG if format cannot be preserved) """ @@ -33,153 +36,176 @@ def resize_image(image_data: bytes, target_width = None if isinstance(target_height, str) and not target_height.strip(): target_height = None - + # If either dimension is None, return original image unchanged if target_width is None or target_height is None: - logger.info("No resize requested (width or height is None/empty), returning original image") + logger.info( + "No resize requested (width or height is None/empty), returning original image" + ) return image_data - + # Convert to int if needed try: target_width = int(target_width) target_height = int(target_height) except (ValueError, TypeError): - logger.warning(f"Invalid resize dimensions: width={target_width}, height={target_height}, returning original image") + logger.warning( + f"Invalid resize dimensions: width={target_width}, height={target_height}, returning original image" + ) return image_data image = Image.open(io.BytesIO(image_data)) current_width, current_height = image.size original_format = image.format # Store original format - + # Calculate scaling factor to fit within bounds while preserving aspect ratio width_ratio = target_width / current_width height_ratio = target_height / current_height scale_factor = min(width_ratio, height_ratio) # Fit within bounds - + # Determine if resizing is needed needs_resize = (scale_factor < 1.0) or (allow_upscale and scale_factor > 1.0) - + if needs_resize: new_width = int(current_width * scale_factor) new_height = int(current_height * scale_factor) - logger.info(f"Resizing image from {current_width}x{current_height} to {new_width}x{new_height} (scale: {scale_factor:.3f})") + logger.info( + f"Resizing image from {current_width}x{current_height} to {new_width}x{new_height} (scale: {scale_factor:.3f})" + ) image = image.resize((new_width, new_height), Image.LANCZOS) - + # Save in original format if possible img_byte_array = io.BytesIO() - + # Determine save format - use original if available, otherwise JPEG - if original_format and original_format in ['JPEG', 'PNG', 'GIF', 'BMP', 'TIFF', 'WEBP']: + if original_format and original_format in [ + "JPEG", + "PNG", + "GIF", + "BMP", + "TIFF", + "WEBP", + ]: save_format = original_format else: - save_format = 'JPEG' + save_format = "JPEG" logger.info(f"Converting from {original_format or 'unknown'} to JPEG") - + # Prepare save parameters save_kwargs = {"format": save_format} - + # Add quality parameters for JPEG - if save_format in ['JPEG', 'JPG']: + if save_format in ["JPEG", "JPG"]: save_kwargs["quality"] = 95 # High quality save_kwargs["optimize"] = True - + # Handle format-specific requirements - if save_format == 'PNG' and image.mode not in ['RGBA', 'LA', 'L', 'P']: + if save_format == "PNG" and image.mode not in ["RGBA", "LA", "L", "P"]: # PNG requires specific modes - if image.mode == 'CMYK': - image = image.convert('RGB') - + if image.mode == "CMYK": + image = image.convert("RGB") + image.save(img_byte_array, **save_kwargs) return img_byte_array.getvalue() else: # No resizing needed - return original data unchanged - logger.info(f"Image {current_width}x{current_height} already fits within {target_width}x{target_height}, returning original") + logger.info( + f"Image 
{current_width}x{current_height} already fits within {target_width}x{target_height}, returning original" + ) return image_data -def prepare_image(image_source: Union[str, bytes], - target_width: Optional[int] = None, - target_height: Optional[int] = None, - allow_upscale: bool = False) -> bytes: + +def prepare_image( + image_source: Union[str, bytes], + target_width: Optional[int] = None, + target_height: Optional[int] = None, + allow_upscale: bool = False, +) -> bytes: """ Prepare an image for model input from either S3 URI or raw bytes - + Args: image_source: Either an S3 URI (s3://bucket/key) or raw image bytes target_width: Target width in pixels (None or empty string = no resize) target_height: Target height in pixels (None or empty string = no resize) allow_upscale: Whether to allow making the image larger than original - + Returns: Processed image bytes ready for model input (preserves format when possible) """ # Get the image data - if isinstance(image_source, str) and image_source.startswith('s3://'): + if isinstance(image_source, str) and image_source.startswith("s3://"): image_data = get_binary_content(image_source) elif isinstance(image_source, bytes): image_data = image_source else: - raise ValueError(f"Invalid image source: {type(image_source)}. Must be S3 URI or bytes.") - + raise ValueError( + f"Invalid image source: {type(image_source)}. Must be S3 URI or bytes." + ) + # Resize and process return resize_image(image_data, target_width, target_height, allow_upscale) + def apply_adaptive_binarization(image_data: bytes) -> bytes: """ Apply adaptive binarization using Pillow-only implementation. - + This preprocessing step can significantly improve OCR accuracy on documents with: - Uneven lighting or shadows - Low contrast text - Background noise or gradients - + Implements adaptive mean thresholding similar to OpenCV's ADAPTIVE_THRESH_MEAN_C with block_size=15 and C=10. - + Args: image_data: Raw image bytes - + Returns: Processed image as JPEG bytes with adaptive binarization applied """ try: # Convert bytes to PIL Image pil_image = Image.open(io.BytesIO(image_data)) - + # Convert to grayscale if not already - if pil_image.mode != 'L': - pil_image = pil_image.convert('L') - + if pil_image.mode != "L": + pil_image = pil_image.convert("L") + # Apply adaptive thresholding using Pillow operations block_size = 15 C = 10 - + # Create a blurred version for local mean calculation # Use BoxBlur with radius = block_size // 2 to approximate local mean radius = block_size // 2 blurred = pil_image.filter(ImageFilter.BoxBlur(radius)) - + # Apply adaptive threshold: original > (blurred - C) ? 
255 : 0 # Load pixel data for efficient access width, height = pil_image.size original_pixels = list(pil_image.getdata()) blurred_pixels = list(blurred.getdata()) - + binary_pixels = [] # Apply thresholding pixel by pixel for orig, blur in zip(original_pixels, blurred_pixels): threshold = blur - C binary_pixels.append(255 if orig > threshold else 0) - + # Create binary image - binary_image = Image.new('L', (width, height)) + binary_image = Image.new("L", (width, height)) binary_image.putdata(binary_pixels) - + # Convert to JPEG bytes img_byte_array = io.BytesIO() binary_image.save(img_byte_array, format="JPEG") - - logger.debug("Applied adaptive binarization preprocessing (Pillow implementation)") + + logger.debug( + "Applied adaptive binarization preprocessing (Pillow implementation)" + ) return img_byte_array.getvalue() - + except Exception as e: logger.error(f"Error applying adaptive binarization: {str(e)}") # Return original image if preprocessing fails @@ -190,28 +216,18 @@ def apply_adaptive_binarization(image_data: bytes) -> bytes: def prepare_bedrock_image_attachment(image_data: bytes) -> Dict[str, Any]: """ Format an image for Bedrock API attachment - + Args: image_data: Raw image bytes - + Returns: Formatted image attachment for Bedrock API """ # Detect image format from image data image = Image.open(io.BytesIO(image_data)) - format_mapping = { - 'JPEG': 'jpeg', - 'PNG': 'png', - 'GIF': 'gif', - 'WEBP': 'webp' - } + format_mapping = {"JPEG": "jpeg", "PNG": "png", "GIF": "gif", "WEBP": "webp"} detected_format = format_mapping.get(image.format) if not detected_format: raise ValueError(f"Unsupported image format: {image.format}") logger.info(f"Detected image format: {detected_format}") - return { - "image": { - "format": detected_format, - "source": {"bytes": image_data} - } - } + return {"image": {"format": detected_format, "source": {"bytes": image_data}}} diff --git a/lib/idp_common_pkg/idp_common/utils/grid_overlay.py b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py new file mode 100644 index 00000000..9430e471 --- /dev/null +++ b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py @@ -0,0 +1,387 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Grid overlay module for adding coordinate references to document images. + +This module provides functions to add ruler-style coordinate grids to images, +enabling LLMs to provide precise bounding box coordinates for extracted fields. +""" + +import io +import logging + +from PIL import Image, ImageDraw, ImageFont + +logger = logging.getLogger(__name__) + + +def add_ruler_edges( + image_data: bytes, + ruler_width: int = 30, + tick_interval: int = 50, + label_interval: int = 100, + ruler_color: tuple[int, int, int, int] = (240, 240, 240, 255), + tick_color: str = "black", + label_color: str = "black", + font_size: int = 12, +) -> bytes: + """ + Add ruler-style edges to the image (like graph paper margins). + Document content remains completely unobscured. + + Args: + image_data: Raw image bytes (JPEG, PNG, etc.) 
+ ruler_width: Width of the ruler margin in pixels + tick_interval: Spacing between minor tick marks (in 0-1000 scale) + label_interval: Spacing between labeled major tick marks (in 0-1000 scale) + ruler_color: Background color of ruler (RGBA tuple) + tick_color: Color of tick marks + label_color: Color of coordinate labels + font_size: Font size for labels + + Returns: + Image bytes with ruler edges added (JPEG format) + """ + image = Image.open(io.BytesIO(image_data)).convert("RGBA") + orig_width, orig_height = image.size + + logger.info(f"Adding ruler edges to image {orig_width}x{orig_height}") + + # Create canvas with ruler margins on top and left + new_width = orig_width + ruler_width + new_height = orig_height + ruler_width + + canvas = Image.new("RGBA", (new_width, new_height), (255, 255, 255, 255)) + + draw = ImageDraw.Draw(canvas) + + # Create ruler backgrounds + # Top ruler (horizontal) - for X coordinates + draw.rectangle([(ruler_width, 0), (new_width, ruler_width)], fill=ruler_color) + # Left ruler (vertical) - for Y coordinates + draw.rectangle([(0, ruler_width), (ruler_width, new_height)], fill=ruler_color) + # Corner square + draw.rectangle([(0, 0), (ruler_width, ruler_width)], fill=ruler_color) + + # Paste original image offset by ruler width + canvas.paste(image, (ruler_width, ruler_width)) + + # Load font + font = _load_font(font_size) + small_font = _load_font(max(font_size - 2, 8)) + + # Draw tick marks and labels on TOP ruler (X-axis) + for i in range(0, 1001, tick_interval): + pixel_x = ruler_width + int((i / 1000.0) * orig_width) + + if i % label_interval == 0: + # Major tick with label + draw.line( + [(pixel_x, ruler_width - 12), (pixel_x, ruler_width)], + fill=tick_color, + width=2, + ) + # Center the label above the tick + label = str(i) + bbox = draw.textbbox((0, 0), label, font=font) + label_width = bbox[2] - bbox[0] + draw.text( + (pixel_x - label_width // 2, 2), + label, + fill=label_color, + font=font, + ) + else: + # Minor tick (no label) + draw.line( + [(pixel_x, ruler_width - 6), (pixel_x, ruler_width)], + fill=tick_color, + width=1, + ) + + # Draw tick marks and labels on LEFT ruler (Y-axis) + for i in range(0, 1001, tick_interval): + pixel_y = ruler_width + int((i / 1000.0) * orig_height) + + if i % label_interval == 0: + # Major tick with label + draw.line( + [(ruler_width - 12, pixel_y), (ruler_width, pixel_y)], + fill=tick_color, + width=2, + ) + # Right-align the label + label = str(i) + bbox = draw.textbbox((0, 0), label, font=font) + label_width = bbox[2] - bbox[0] + draw.text( + (ruler_width - label_width - 14, pixel_y - 6), + label, + fill=label_color, + font=font, + ) + else: + # Minor tick + draw.line( + [(ruler_width - 6, pixel_y), (ruler_width, pixel_y)], + fill=tick_color, + width=1, + ) + + # Add origin marker in corner (skip if using default font that doesn't support sizing) + try: + draw.text((2, 2), "0", fill=label_color, font=small_font) + except (OSError, AttributeError): + logger.debug("Skipping origin label - font rendering issue") + + canvas = canvas.convert("RGB") + img_byte_array = io.BytesIO() + canvas.save(img_byte_array, format="JPEG", quality=95) + + logger.info(f"Ruler edges added. 
New size: {new_width}x{new_height}") + return img_byte_array.getvalue() + + +def draw_bounding_boxes( + image_data: bytes, + bboxes: list[dict], + has_ruler: bool = False, + ruler_width: int = 30, + box_color: str = "red", + box_width: int = 3, + label_font_size: int = 12, + show_labels: bool = True, +) -> bytes: + """ + Draw bounding boxes on an image using normalized 0-1000 coordinates. + + Args: + image_data: Raw image bytes + bboxes: List of bounding box dictionaries, each containing: + - 'bbox': [x1, y1, x2, y2] in 0-1000 normalized scale + - 'label': Optional label text for the box + - 'color': Optional color override for this box + - 'page': Optional page number (for multi-page docs) + has_ruler: If True, account for ruler margins in coordinate calculation + ruler_width: Width of ruler margin (only used if has_ruler=True) + box_color: Default color for bounding boxes + box_width: Line width for bounding boxes + label_font_size: Font size for box labels + show_labels: Whether to show labels on boxes + + Returns: + Image bytes with bounding boxes drawn + + Example: + bboxes = [ + { + 'bbox': [150, 220, 380, 245], + 'label': 'Account Number', + 'color': 'green' + }, + { + 'bbox': [100, 300, 500, 330], + 'label': 'Balance' + } + ] + result = draw_bounding_boxes(image_data, bboxes) + """ + image = Image.open(io.BytesIO(image_data)).convert("RGBA") + width, height = image.size + + # If image has ruler edges, calculate the actual document area + if has_ruler: + doc_width = width - ruler_width + doc_height = height - ruler_width + offset_x = ruler_width + offset_y = ruler_width + else: + doc_width = width + doc_height = height + offset_x = 0 + offset_y = 0 + + # Create overlay for semi-transparent boxes + overlay = Image.new("RGBA", (width, height), (0, 0, 0, 0)) + draw = ImageDraw.Draw(overlay) + + font = _load_font(label_font_size) + + for i, bbox_info in enumerate(bboxes): + bbox = bbox_info.get("bbox", []) + if len(bbox) != 4: + logger.warning(f"Invalid bbox format at index {i}: {bbox}") + continue + + x1_norm, y1_norm, x2_norm, y2_norm = bbox + + # Convert from 0-1000 scale to pixel coordinates + x1_pixel = offset_x + int((x1_norm / 1000.0) * doc_width) + y1_pixel = offset_y + int((y1_norm / 1000.0) * doc_height) + x2_pixel = offset_x + int((x2_norm / 1000.0) * doc_width) + y2_pixel = offset_y + int((y2_norm / 1000.0) * doc_height) + + # Get color for this box + color = bbox_info.get("color", box_color) + + # Draw rectangle outline + draw.rectangle( + [(x1_pixel, y1_pixel), (x2_pixel, y2_pixel)], + outline=color, + width=box_width, + ) + + # Add semi-transparent fill + fill_color = _get_rgba_color(color, alpha=50) + draw.rectangle( + [ + (x1_pixel + box_width, y1_pixel + box_width), + (x2_pixel - box_width, y2_pixel - box_width), + ], + fill=fill_color, + ) + + # Add label if provided + if show_labels and "label" in bbox_info: + label = bbox_info["label"] + + # Draw label background + label_bbox = draw.textbbox((0, 0), label, font=font) + label_width = label_bbox[2] - label_bbox[0] + label_height = label_bbox[3] - label_bbox[1] + + # Position label above the box + label_x = x1_pixel + label_y = y1_pixel - label_height - 4 + + # If label would go off top of image, put it below the box + if label_y < offset_y: + label_y = y2_pixel + 2 + + # Draw label background + draw.rectangle( + [ + (label_x - 2, label_y - 2), + (label_x + label_width + 2, label_y + label_height + 2), + ], + fill=(255, 255, 255, 220), + ) + + # Draw label text + draw.text((label_x, label_y), label, fill=color, 
font=font) + + # Add coordinate annotation + coord_text = f"[{x1_norm},{y1_norm},{x2_norm},{y2_norm}]" + coord_bbox = draw.textbbox((0, 0), coord_text, font=_load_font(8)) + coord_width = coord_bbox[2] - coord_bbox[0] + + # Position coordinates at bottom-right of box + coord_x = x2_pixel - coord_width - 2 + coord_y = y2_pixel + 2 + + draw.rectangle( + [(coord_x - 1, coord_y - 1), (coord_x + coord_width + 1, coord_y + 10)], + fill=(255, 255, 255, 200), + ) + draw.text((coord_x, coord_y), coord_text, fill="gray", font=_load_font(8)) + + # Composite overlay onto original image + result = Image.alpha_composite(image, overlay) + result = result.convert("RGB") + + img_byte_array = io.BytesIO() + result.save(img_byte_array, format="JPEG", quality=95) + + logger.info(f"Drew {len(bboxes)} bounding boxes on image") + return img_byte_array.getvalue() + + +def add_ruler_and_draw_boxes( + image_data: bytes, + bboxes: list[dict], + ruler_width: int = 30, + tick_interval: int = 50, + label_interval: int = 100, + box_color: str = "red", + box_width: int = 3, +) -> bytes: + """ + Convenience function to add ruler edges and draw bounding boxes in one step. + + Args: + image_data: Raw image bytes + bboxes: List of bounding box dictionaries + ruler_width: Width of ruler margin + tick_interval: Spacing between minor ticks + label_interval: Spacing between major ticks + box_color: Default color for boxes + box_width: Line width for boxes + + Returns: + Image bytes with ruler and bounding boxes + """ + # First add ruler edges + image_with_ruler = add_ruler_edges( + image_data, + ruler_width=ruler_width, + tick_interval=tick_interval, + label_interval=label_interval, + ) + + # Then draw bounding boxes (accounting for ruler offset) + result = draw_bounding_boxes( + image_with_ruler, + bboxes, + has_ruler=True, + ruler_width=ruler_width, + box_color=box_color, + box_width=box_width, + ) + + return result + + +def _load_font(size: int): + """Load a font, falling back to default if not available.""" + font_paths = [ + "/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf", + "/usr/share/fonts/truetype/liberation/LiberationSans-Regular.ttf", + "/System/Library/Fonts/Helvetica.ttc", + "/Library/Fonts/Arial.ttf", + "/Library/Fonts/Arial Unicode.ttf", + "C:\\Windows\\Fonts\\arial.ttf", + ] + + for font_path in font_paths: + try: + return ImageFont.truetype(font_path, size) + except (OSError, IOError): + continue + + # Fall back to default font without size (it's fixed size) + logger.warning( + "Could not load TrueType font from standard paths, using PIL default" + ) + return ImageFont.load_default() + + +def _get_rgba_color(color_name: str, alpha: int = 255) -> tuple[int, int, int, int]: + """Convert color name to RGBA tuple.""" + color_map = { + "red": (255, 0, 0, alpha), + "green": (0, 255, 0, alpha), + "blue": (0, 0, 255, alpha), + "yellow": (255, 255, 0, alpha), + "orange": (255, 165, 0, alpha), + "purple": (128, 0, 128, alpha), + "cyan": (0, 255, 255, alpha), + "magenta": (255, 0, 255, alpha), + "lime": (0, 255, 0, alpha), + "pink": (255, 192, 203, alpha), + "black": (0, 0, 0, alpha), + "white": (255, 255, 255, alpha), + "gray": (128, 128, 128, alpha), + } + + return color_map.get(color_name.lower(), (255, 0, 0, alpha)) diff --git a/lib/idp_common_pkg/pyproject.toml b/lib/idp_common_pkg/pyproject.toml index fe787afe..14e933fd 100644 --- a/lib/idp_common_pkg/pyproject.toml +++ b/lib/idp_common_pkg/pyproject.toml @@ -19,7 +19,7 @@ name = "idp_common" version = "0.4.6" description = "Common utilities for GenAI IDP 
Accelerator patterns" authors = [{ name = "AWS", email = "noreply@amazon.com" }] -requires-python = ">=3.10,<3.14" +requires-python = ">=3.12,<3.14" dependencies = [ "boto3==1.42.0", # Core dependency for AWS services "jsonschema>=4.25.1", @@ -66,7 +66,7 @@ classification = [ # Extraction module dependencies extraction = [ - "Pillow==11.2.1", # For image handling + "Pillow==11.2.1", # For image handling ] # Assessment module dependencies @@ -100,24 +100,24 @@ appsync = ["requests==2.32.4"] agents = [ "strands-agents==1.14.0; python_version>='3.10'", "strands-agents-tools==0.2.13; python_version>='3.10'", - "bedrock-agentcore>=0.1.1; python_version>='3.10'", # Specifically for the code interpreter tool + "bedrock-agentcore>=0.1.1; python_version>='3.10'", # Specifically for the code interpreter tool ] # Code intelligence module dependencies code_intel = [ - "requests==2.32.4", - "strands-agents==1.14.0", - "strands-agents-tools==0.2.13", - "bedrock-agentcore>=0.1.1", - "PyYAML>=6.0.0", - "pathspec>=0.11.0", - "chardet>=5.0.0" + "requests==2.32.4", + "strands-agents==1.14.0", + "strands-agents-tools==0.2.13", + "bedrock-agentcore>=0.1.1", + "PyYAML>=6.0.0", + "pathspec>=0.11.0", + "chardet>=5.0.0", ] # Document service factory dependencies (includes both appsync and dynamodb support) # This includes all dependencies needed for both backends docs_service = [ - "requests==2.32.4", # Required for appsync module (dynamodb only needs boto3 which is core) + "requests==2.32.4", # Required for appsync module (dynamodb only needs boto3 which is core) "aws-xray-sdk>=2.14.0", # Required for X-Ray tracing ] @@ -173,7 +173,7 @@ agentic-extraction = [ "jsonpatch==1.33", "strands-agents==1.14.0 ; python_full_version >= '3.10'", "pandas>=2.2.3", - "pymupdf==1.25.5", # Pinned to 1.25.5 - has pre-built ARM64 wheels, 1.26.x requires compilation + "pymupdf==1.25.5", # Pinned to 1.25.5 - has pre-built ARM64 wheels, 1.26.x requires compilation "email-validator>=2.3.0", "tabulate>=0.9.0", "aws-lambda-powertools>=3.2.0", # Structured logging and observability diff --git a/lib/idp_common_pkg/tests/conftest.py b/lib/idp_common_pkg/tests/conftest.py index a83d71d2..f17c0da1 100644 --- a/lib/idp_common_pkg/tests/conftest.py +++ b/lib/idp_common_pkg/tests/conftest.py @@ -14,11 +14,8 @@ # Mock external dependencies that may not be available in test environments # These mocks need to be set up before any imports that might use these packages -# Mock strands modules for agent functionality -sys.modules["strands"] = MagicMock() -sys.modules["strands.models"] = MagicMock() -sys.modules["strands.hooks"] = MagicMock() -sys.modules["strands.hooks.events"] = MagicMock() +# NOTE: strands mocking has been moved to individual test files that need it +# to avoid conflicts with tests that use the real strands package # Mock bedrock_agentcore modules for secure code execution sys.modules["bedrock_agentcore"] = MagicMock() diff --git a/lib/idp_common_pkg/tests/unit/assessment/conftest.py b/lib/idp_common_pkg/tests/unit/assessment/conftest.py new file mode 100644 index 00000000..a1b17cd9 --- /dev/null +++ b/lib/idp_common_pkg/tests/unit/assessment/conftest.py @@ -0,0 +1,24 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Pytest configuration for assessment tests. + +These tests need strands modules mocked since they test the assessment service +which imports strands modules but the tests don't actually execute strands code. 
+""" + +import sys +from unittest.mock import MagicMock + +# Mock strands modules before any assessment imports +# This allows tests to import assessment code without requiring strands package +sys.modules["strands"] = MagicMock() +sys.modules["strands.agent"] = MagicMock() +sys.modules["strands.agent.conversation_manager"] = MagicMock() +sys.modules["strands.models"] = MagicMock() +sys.modules["strands.models.bedrock"] = MagicMock() +sys.modules["strands.types"] = MagicMock() +sys.modules["strands.types.content"] = MagicMock() +sys.modules["strands.hooks"] = MagicMock() +sys.modules["strands.hooks.events"] = MagicMock() diff --git a/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py b/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py index fc8d6d1c..96db5c1f 100644 --- a/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py +++ b/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py @@ -16,7 +16,6 @@ from idp_common.config.models import ( AssessmentConfig, ExtractionConfig, - GranularAssessmentConfig, IDPConfig, ImageConfig, ) @@ -185,9 +184,7 @@ def test_complex_real_world_scenario(self): top_p=0.1, max_tokens=10000, ), - assessment=AssessmentConfig( - enabled=True, temperature=0.0, granular={"enabled": False} - ), + assessment=AssessmentConfig(enabled=True, temperature=0.0, max_workers=10), classes=[], ) @@ -205,7 +202,7 @@ def test_complex_real_world_scenario(self): assessment=AssessmentConfig( enabled=False, # CUSTOM temperature=0.0, - granular=GranularAssessmentConfig(enabled=False), + max_workers=10, ), classes=[{"$id": "Invoice", "properties": {}}], # CUSTOM ) @@ -225,7 +222,7 @@ def test_complex_real_world_scenario(self): assessment=AssessmentConfig( enabled=True, temperature=0.5, # NEW - granular=GranularAssessmentConfig(enabled=True), # NEW + max_workers=20, # NEW ), classes=[], ) @@ -246,7 +243,7 @@ def test_complex_real_world_scenario(self): assert new_custom.extraction.top_p == 0.2 assert new_custom.extraction.max_tokens == 15000 assert new_custom.assessment.temperature == 0.5 - assert new_custom.assessment.granular.enabled + assert new_custom.assessment.max_workers == 20 @pytest.mark.unit diff --git a/lib/idp_common_pkg/tests/unit/test_granular_assessment.py b/lib/idp_common_pkg/tests/unit/test_granular_assessment.py index 4c4904ef..5df0b926 100644 --- a/lib/idp_common_pkg/tests/unit/test_granular_assessment.py +++ b/lib/idp_common_pkg/tests/unit/test_granular_assessment.py @@ -5,8 +5,6 @@ Unit tests for the granular assessment service. """ -from unittest.mock import patch - import pytest from idp_common.assessment.granular_service import ( AssessmentResult, @@ -46,18 +44,13 @@ def sample_config(self): """Sample configuration for testing.""" return { "assessment": { - "granular": { - "max_workers": 4, - "simple_batch_size": 3, - "list_batch_size": 1, - }, + "max_workers": 4, "model": "us.anthropic.claude-3-7-sonnet-20250219-v1:0", "temperature": 0.0, "top_k": 5, "top_p": 0.1, "max_tokens": 4096, "system_prompt": "You are an assessment expert.", - "task_prompt": "Assess {DOCUMENT_CLASS} with {ATTRIBUTE_NAMES_AND_DESCRIPTIONS}. 
Results: {EXTRACTION_RESULTS}", "default_confidence_threshold": 0.9, }, "classes": [ @@ -110,13 +103,11 @@ def test_initialization(self, sample_config): service = GranularAssessmentService(config=idp_config) assert service.max_workers == 4 - assert service.simple_batch_size == 3 - assert service.list_batch_size == 1 assert service.enable_parallel # max_workers > 1 def test_initialization_single_worker(self, sample_config): """Test service initialization with single worker.""" - sample_config["assessment"]["granular"]["max_workers"] = 1 + sample_config["assessment"]["max_workers"] = 1 idp_config = IDPConfig.model_validate(sample_config) service = GranularAssessmentService(config=idp_config) @@ -143,43 +134,37 @@ def test_get_class_schema_not_found(self, sample_config): assert schema == {} - def test_format_property_descriptions(self, sample_config): - """Test formatting property descriptions from JSON Schema.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - properties = service._get_class_schema("letter").get("properties", {}) - descriptions = service._format_property_descriptions(properties) - - assert "sender_name" in descriptions - assert "Name of the sender" in descriptions - assert "recipient_name" in descriptions - - def test_create_assessment_tasks_simple_batching( + def test_create_assessment_tasks_simple_attributes( self, sample_config, sample_extraction_results ): - """Test creating assessment tasks with simple attribute batching.""" + """Test creating assessment tasks with simple attributes - new Strands approach.""" idp_config = IDPConfig.model_validate(sample_config) service = GranularAssessmentService(config=idp_config) properties = service._get_class_schema("letter").get("properties", {}) - tasks = service._create_assessment_tasks( + tasks, assessment_structure = service._create_assessment_tasks( sample_extraction_results, properties, 0.9 ) - # With 5 simple attributes and batch_size=3, we should get 2 batches - assert len(tasks) == 2 - assert tasks[0].task_type == "simple_batch" - assert tasks[0].task_id == "simple_batch_0" - assert len(tasks[0].attributes) == 3 # First batch: 3 attributes - assert len(tasks[1].attributes) == 2 # Second batch: 2 attributes + # With new approach: one task per leaf field = 5 tasks + assert len(tasks) == 5 + + # All tasks should be "attribute" type (single field assessment) + assert all(t.task_type == "attribute" for t in tasks) + + # All tasks should have field_path as tuple + assert all(isinstance(t.field_path, tuple) for t in tasks) - # Check that extraction data is properly included - assert "sender_name" in tasks[0].extraction_data - assert "recipient_name" in tasks[0].extraction_data + # All tasks should have parent_assessment_dict reference + assert all(t.parent_assessment_dict is not None for t in tasks) - def test_create_assessment_tasks_with_group_attributes(self, sample_config): - """Test creating assessment tasks with group attributes.""" - # Add a group property to the config + # Check that assessment_structure mirrors extraction_results + assert isinstance(assessment_structure, dict) + assert set(assessment_structure.keys()) == set(sample_extraction_results.keys()) + + def test_create_assessment_tasks_with_nested_object(self, sample_config): + """Test creating assessment tasks with nested object attributes.""" + # Add a nested object property to the config sample_config["classes"][0]["properties"]["address_info"] = { "type": "object", "description": "Address 
information", @@ -198,21 +183,31 @@ def test_create_assessment_tasks_with_group_attributes(self, sample_config): service = GranularAssessmentService(config=idp_config) properties = service._get_class_schema("letter").get("properties", {}) - tasks = service._create_assessment_tasks(extraction_results, properties, 0.9) + tasks, assessment_structure = service._create_assessment_tasks( + extraction_results, properties, 0.9 + ) - # Should have simple batches + 1 group task - group_tasks = [t for t in tasks if t.task_type == "group"] - assert len(group_tasks) == 1 - assert group_tasks[0].attributes == ["address_info"] - assert "address_info" in group_tasks[0].extraction_data + # Should have 3 tasks: sender_name, address_info.street, address_info.city + assert len(tasks) == 3 - def test_create_assessment_tasks_with_list_attributes(self, sample_config): - """Test creating assessment tasks with list attributes.""" - # Add a list property to the config + # Find nested tasks + nested_tasks = [t for t in tasks if len(t.field_path) > 1] + assert len(nested_tasks) == 2 + + # Check nested paths are tuples + assert any(t.field_path == ("address_info", "street") for t in nested_tasks) + assert any(t.field_path == ("address_info", "city") for t in nested_tasks) + + # Check assessment structure has nested dict + assert "address_info" in assessment_structure + assert isinstance(assessment_structure["address_info"], dict) + + def test_create_assessment_tasks_with_array(self, sample_config): + """Test creating assessment tasks with array attributes.""" + # Add an array property to the config sample_config["classes"][0]["properties"]["transactions"] = { "type": "array", "description": "List of transactions", - "x-aws-idp-list-item-description": "A single transaction", "items": { "type": "object", "properties": { @@ -237,284 +232,112 @@ def test_create_assessment_tasks_with_list_attributes(self, sample_config): service = GranularAssessmentService(config=idp_config) properties = service._get_class_schema("letter").get("properties", {}) - tasks = service._create_assessment_tasks(extraction_results, properties, 0.9) - - # Should have simple batches + 2 list item tasks - list_tasks = [t for t in tasks if t.task_type == "list_item"] - assert len(list_tasks) == 2 - assert list_tasks[0].task_id == "list_transactions_item_0" - assert list_tasks[1].task_id == "list_transactions_item_1" - assert list_tasks[0].list_item_index == 0 - assert list_tasks[1].list_item_index == 1 - - def test_get_task_specific_attribute_descriptions(self, sample_config): - """Test getting task-specific attribute descriptions.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - properties = service._get_class_schema("letter").get("properties", {}) - - # Create a simple batch task - task = AssessmentTask( - task_id="test_batch", - task_type="simple_batch", - attributes=["sender_name", "recipient_name"], - extraction_data={"sender_name": "John", "recipient_name": "Jane"}, - confidence_thresholds={"sender_name": 0.9, "recipient_name": 0.9}, + tasks, assessment_structure = service._create_assessment_tasks( + extraction_results, properties, 0.9 ) - descriptions = service._get_task_specific_attribute_descriptions( - task, properties - ) + # Should have 5 tasks: sender_name + 2 items * 2 fields each = 1 + 4 = 5 + assert len(tasks) == 5 - assert "sender_name" in descriptions - assert "recipient_name" in descriptions - assert "date" not in descriptions # Should only include task attributes + 
# Find array item tasks + array_tasks = [ + t for t in tasks if len(t.field_path) == 3 + ] # ("transactions", 0, "amount") + assert len(array_tasks) == 4 - def test_build_specific_assessment_prompt(self, sample_config): - """Test building specific assessment prompt.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - properties = service._get_class_schema("letter").get("properties", {}) - - # Mock base content with placeholders (like what would come from the real base content) - base_content = [ - {"text": "Base prompt content with {EXTRACTION_RESULTS} placeholder"}, - {"text": "<>"}, - ] - - # Create a simple batch task - task = AssessmentTask( - task_id="test_batch", - task_type="simple_batch", - attributes=["sender_name", "recipient_name"], - extraction_data={"sender_name": "John", "recipient_name": "Jane"}, - confidence_thresholds={"sender_name": 0.9, "recipient_name": 0.9}, - ) - - content = service._build_specific_assessment_prompt( - task, base_content, properties - ) - - # Should have same number of content items as base content - assert len(content) == 2 - - # First item should have placeholder replaced with extraction results - first_content = content[0]["text"] - assert "Base prompt content with" in first_content - assert ( - "{EXTRACTION_RESULTS}" not in first_content - ) # Placeholder should be replaced - assert "sender_name" in first_content - assert "recipient_name" in first_content - assert "John" in first_content - assert "Jane" in first_content - - # Cache point should be preserved - assert content[1]["text"] == "<>" - - def test_build_cached_prompt_base(self, sample_config): - """Test building cached prompt base.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - - content = service._build_cached_prompt_base( - document_text="Sample document text", - class_label="letter", - attribute_descriptions="", # Empty for base content - will be task-specific - ocr_text_confidence="OCR confidence data", - page_images=[], - ) - - # Should have at least one text content item - assert len(content) >= 1 - # Check that the task prompt template is used and placeholders are replaced + # Check array paths include indices + assert any(t.field_path == ("transactions", 0, "amount") for t in array_tasks) assert any( - "letter" in item.get("text", "") for item in content - ) # DOCUMENT_CLASS - # Attribute descriptions should NOT be in base content (they're task-specific) - assert not any( - "sender_name: Name of sender" in item.get("text", "") for item in content - ) # ATTRIBUTE_NAMES_AND_DESCRIPTIONS should be placeholder - # Should contain placeholders for task-specific content - assert any( - "{ATTRIBUTE_NAMES_AND_DESCRIPTIONS}" in item.get("text", "") - or "{EXTRACTION_RESULTS}" in item.get("text", "") - for item in content + t.field_path == ("transactions", 1, "description") for t in array_tasks ) - @patch("idp_common.bedrock.invoke_model") - def test_process_assessment_task_success(self, mock_bedrock, sample_config): - """Test successful processing of an assessment task.""" - # Mock Bedrock response - mock_response = { - "metering": { - "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { - "input_tokens": 100, - "output_tokens": 50, - } - }, - "response": { - "output": { - "message": { - "content": [ - { - "text": '{"sender_name": {"confidence": 0.95, "confidence_reason": "Clear evidence"}}' - } - ] - } - } - }, - } - mock_bedrock.return_value = mock_response + 
# Check assessment structure has array + assert "transactions" in assessment_structure + assert isinstance(assessment_structure["transactions"], list) + assert len(assessment_structure["transactions"]) == 2 + def test_aggregate_assessment_results_new_approach(self, sample_config): + """Test aggregating assessment results with new Strands approach.""" idp_config = IDPConfig.model_validate(sample_config) service = GranularAssessmentService(config=idp_config) - properties = service._get_class_schema("letter").get("properties", {}) - # Create a task - task = AssessmentTask( - task_id="test_batch", - task_type="simple_batch", - attributes=["sender_name"], - extraction_data={"sender_name": "John"}, - confidence_thresholds={"sender_name": 0.9}, - ) - - base_content = [{"text": "Base prompt"}] - - result = service._process_assessment_task( - task, - base_content, - properties, - "test-model", - "system prompt", - 0.0, - 5, - 0.1, - 4096, - ) - - assert result.success - assert result.task_id == "test_batch" - assert "sender_name" in result.assessment_data - assert result.assessment_data["sender_name"]["confidence"] == 0.95 - - @patch("idp_common.bedrock.invoke_model") - def test_process_assessment_task_bedrock_error(self, mock_bedrock, sample_config): - """Test processing assessment task with Bedrock error.""" - # Mock Bedrock to raise an exception - mock_bedrock.side_effect = Exception("Bedrock error") - - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - properties = service._get_class_schema("letter").get("properties", {}) - - task = AssessmentTask( - task_id="test_batch", - task_type="simple_batch", - attributes=["sender_name"], - extraction_data={"sender_name": "John"}, - confidence_thresholds={"sender_name": 0.9}, - ) - - base_content = [{"text": "Base prompt"}] - - result = service._process_assessment_task( - task, - base_content, - properties, - "test-model", - "system prompt", - 0.0, - 5, - 0.1, - 4096, - ) - - assert not result.success - assert result.error_message == "Bedrock error" - - def test_check_confidence_alerts_simple_batch(self, sample_config): - """Test confidence alert checking for simple batch tasks.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - - task = AssessmentTask( - task_id="test_batch", - task_type="simple_batch", - attributes=["sender_name", "recipient_name"], - extraction_data={}, - confidence_thresholds={"sender_name": 0.9, "recipient_name": 0.8}, - ) - - assessment_data = { - "sender_name": {"confidence": 0.95}, # Above threshold - "recipient_name": {"confidence": 0.7}, # Below threshold + # Create pre-built assessment structure + assessment_structure = { + "sender_name": None, + "recipient_name": None, + "date": None, } - alerts = [] - service._check_confidence_alerts_for_task(task, assessment_data, alerts) - - assert len(alerts) == 1 - assert alerts[0]["attribute_name"] == "recipient_name" - assert alerts[0]["confidence"] == 0.7 - assert alerts[0]["confidence_threshold"] == 0.8 - - def test_aggregate_assessment_results(self, sample_config): - """Test aggregating assessment results.""" - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - - # Create tasks and results + # Create tasks with new structure task1 = AssessmentTask( - task_id="batch_0", - task_type="simple_batch", - attributes=["sender_name", "recipient_name"], - extraction_data={}, - 
confidence_thresholds={"sender_name": 0.9, "recipient_name": 0.9}, + task_id="task_0", + task_type="attribute", + field_path=("sender_name",), + field_name="sender_name", + field_schema={"type": "string"}, + confidence_threshold=0.9, + parent_assessment_dict=assessment_structure, ) task2 = AssessmentTask( - task_id="batch_1", - task_type="simple_batch", - attributes=["date"], - extraction_data={}, - confidence_thresholds={"date": 0.9}, + task_id="task_1", + task_type="attribute", + field_path=("recipient_name",), + field_name="recipient_name", + field_schema={"type": "string"}, + confidence_threshold=0.9, + parent_assessment_dict=assessment_structure, ) + task3 = AssessmentTask( + task_id="task_2", + task_type="attribute", + field_path=("date",), + field_name="date", + field_schema={"type": "string"}, + confidence_threshold=0.9, + parent_assessment_dict=assessment_structure, + ) + + # Create results result1 = AssessmentResult( - task_id="batch_0", + task_id="task_0", success=True, - assessment_data={ - "sender_name": {"confidence": 0.95, "confidence_reason": "Clear"}, - "recipient_name": {"confidence": 0.85, "confidence_reason": "Good"}, - }, + assessment_data={"confidence": 0.95, "confidence_reason": "Clear"}, confidence_alerts=[], metering={"model": {"input_tokens": 100}}, ) result2 = AssessmentResult( - task_id="batch_1", + task_id="task_1", success=True, - assessment_data={ - "date": {"confidence": 0.90, "confidence_reason": "Clear date"} - }, + assessment_data={"confidence": 0.85, "confidence_reason": "Good"}, confidence_alerts=[], metering={"model": {"input_tokens": 50}}, ) + result3 = AssessmentResult( + task_id="task_2", + success=True, + assessment_data={"confidence": 0.90, "confidence_reason": "Clear date"}, + confidence_alerts=[], + metering={"model": {"input_tokens": 25}}, + ) + + # Aggregate results using new signature enhanced_data, alerts, metering = service._aggregate_assessment_results( - [task1, task2], [result1, result2], {} + [task1, task2, task3], [result1, result2, result3], assessment_structure ) - # Check enhanced data + # Check enhanced data (should be the assessment_structure with values filled in) assert "sender_name" in enhanced_data assert "recipient_name" in enhanced_data assert "date" in enhanced_data assert enhanced_data["sender_name"]["confidence_threshold"] == 0.9 + assert enhanced_data["sender_name"]["confidence"] == 0.95 - # Check metering aggregation (using utils.merge_metering_data) - assert metering["model"]["input_tokens"] == 150 + # Check metering aggregation + assert metering["model"]["input_tokens"] == 175 # 100 + 50 + 25 def test_empty_extraction_results_handling(self, sample_config): """Test handling of empty extraction results.""" @@ -523,23 +346,11 @@ def test_empty_extraction_results_handling(self, sample_config): properties = service._get_class_schema("letter").get("properties", {}) # Empty extraction results should create no tasks - tasks = service._create_assessment_tasks({}, properties, 0.9) + tasks, assessment_structure = service._create_assessment_tasks( + {}, properties, 0.9 + ) assert len(tasks) == 0 - - def test_missing_task_prompt_uses_default(self, sample_config): - """Test that default task_prompt is used when not in config.""" - # Remove task_prompt from config - del sample_config["assessment"]["task_prompt"] - - idp_config = IDPConfig.model_validate(sample_config) - service = GranularAssessmentService(config=idp_config) - - # Should not raise an error, should use default task_prompt from IDPConfig - prompt = 
service._build_cached_prompt_base("text", "letter", "attrs", "ocr", []) - - # Verify a prompt was generated (not empty) - assert prompt is not None - assert len(prompt) > 0 + assert assessment_structure == {} def test_confidence_threshold_inheritance(self, sample_config): """Test that confidence thresholds are properly inherited.""" diff --git a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index 66681778..2be2cafe 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -1,12 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.9, <3.14" -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", -] +requires-python = ">=3.12, <3.14" [[package]] name = "aiobotocore" @@ -42,7 +36,6 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, { name = "aiosignal" }, - { name = "async-timeout", marker = "python_full_version < '3.11'" }, { name = "attrs" }, { name = "frozenlist" }, { name = "multidict" }, @@ -51,40 +44,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348, upload-time = "2025-10-06T19:58:48.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/18/a3a9c9b7c8d400f71d1ff93c3e1520a5d53dba170f829ca9c6b2b070677b/aiohttp-3.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ca69ec38adf5cadcc21d0b25e2144f6a25b7db7bea7e730bac25075bc305eff0", size = 734428, upload-time = "2025-10-06T19:54:40.285Z" }, - { url = "https://files.pythonhosted.org/packages/aa/02/f1eac06d78997e015030130ccf1c7cf864a919f97d77ff27e89c82fc3186/aiohttp-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:240f99f88a9a6beb53ebadac79a2e3417247aa756202ed234b1dbae13d248092", size = 491939, upload-time = "2025-10-06T19:54:42.113Z" }, - { url = "https://files.pythonhosted.org/packages/e1/db/5d65af7cbe5f302e23b1ea5cfc156cd0c7738a0d2db531a3837d2754de94/aiohttp-3.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a4676b978a9711531e7cea499d4cdc0794c617a1c0579310ab46c9fdf5877702", size = 487229, upload-time = "2025-10-06T19:54:43.978Z" }, - { url = "https://files.pythonhosted.org/packages/d3/d5/56c622ad3bd57ff4adc2b701f298dcc0408735a8af998cec1c66a9ce224e/aiohttp-3.13.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48fcdd5bc771cbbab8ccc9588b8b6447f6a30f9fe00898b1a5107098e00d6793", size = 1666118, upload-time = "2025-10-06T19:54:46.569Z" }, - { url = "https://files.pythonhosted.org/packages/44/16/db236671ec3758e3a6be6977009e74016470368012a58fea4b3799546549/aiohttp-3.13.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eeea0cdd2f687e210c8f605f322d7b0300ba55145014a5dbe98bd4be6fff1f6c", size = 1633983, upload-time = "2025-10-06T19:54:48.244Z" }, - { url = "https://files.pythonhosted.org/packages/19/ad/d96d7d7023e7f5215b8737cad21a7637f6d9d10fbfbfef0435d0277f71a2/aiohttp-3.13.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b3f01d5aeb632adaaf39c5e93f040a550464a768d54c514050c635adcbb9d0", size = 1725922, upload-time = "2025-10-06T19:54:49.885Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/d7/e8a5ba2bbd929ed587b2a8ea9390765daede2d8cd28dfae3a0773c6d3fbc/aiohttp-3.13.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a4dc0b83e25267f42ef065ea57653de4365b56d7bc4e4cfc94fabe56998f8ee6", size = 1813770, upload-time = "2025-10-06T19:54:51.648Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ca/135c21e85ffeff66b80ecd8a647ca104f2e5a91c37dc86649244ddbf87ab/aiohttp-3.13.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:72714919ed9b90f030f761c20670e529c4af96c31bd000917dd0c9afd1afb731", size = 1667322, upload-time = "2025-10-06T19:54:53.668Z" }, - { url = "https://files.pythonhosted.org/packages/f6/38/348c4343052a400968dbf2051ee3dc222bdefd95af5874cf0f04cc7a8c92/aiohttp-3.13.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:564be41e85318403fdb176e9e5b3e852d528392f42f2c1d1efcbeeed481126d7", size = 1553270, upload-time = "2025-10-06T19:54:56.054Z" }, - { url = "https://files.pythonhosted.org/packages/47/89/71cbda30f0900ab16084769960c467a355d6b1db51668fbb821c4a4ad5ed/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:84912962071087286333f70569362e10793f73f45c48854e6859df11001eb2d3", size = 1637087, upload-time = "2025-10-06T19:54:58.548Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b1/5ff5fcaecccdcd5be7ff717cbde6e630760a8130e89167c3aa05b6b57707/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:90b570f1a146181c3d6ae8f755de66227ded49d30d050479b5ae07710f7894c5", size = 1643443, upload-time = "2025-10-06T19:55:00.856Z" }, - { url = "https://files.pythonhosted.org/packages/87/e2/1d1f202f43c8be1956f05196159064cc05dc6842a33c1397cbb1b99610af/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2d71ca30257ce756e37a6078b1dff2d9475fee13609ad831eac9a6531bea903b", size = 1695571, upload-time = "2025-10-06T19:55:03.006Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b9/53c1df2991686f947a9651265757ea12c4afc29b351a249b73a0fc81dd3c/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:cd45eb70eca63f41bb156b7dffbe1a7760153b69892d923bdb79a74099e2ed90", size = 1539975, upload-time = "2025-10-06T19:55:04.839Z" }, - { url = "https://files.pythonhosted.org/packages/93/24/345166f9c4cd2f5cc1d2173131998ee4adab0db8729126db32a7f91ed400/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5ae3a19949a27982c7425a7a5a963c1268fdbabf0be15ab59448cbcf0f992519", size = 1712866, upload-time = "2025-10-06T19:55:06.905Z" }, - { url = "https://files.pythonhosted.org/packages/09/f1/e8f70462848b74d49b3115050623ecbd697889713c2c93c96616da56b2de/aiohttp-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ea6df292013c9f050cbf3f93eee9953d6e5acd9e64a0bf4ca16404bfd7aa9bcc", size = 1654058, upload-time = "2025-10-06T19:55:08.51Z" }, - { url = "https://files.pythonhosted.org/packages/23/ba/47fd065510a8bfab5d5f6e1d97c0de672447c0a941c5021298bd7210afc3/aiohttp-3.13.0-cp310-cp310-win32.whl", hash = "sha256:3b64f22fbb6dcd5663de5ef2d847a5638646ef99112503e6f7704bdecb0d1c4d", size = 430230, upload-time = "2025-10-06T19:55:10.178Z" }, - { url = "https://files.pythonhosted.org/packages/c4/38/f5385cb79afa1f31bcaa3625a9e8d849b782edaeac09f894f46439e006a1/aiohttp-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:f8d877aa60d80715b2afc565f0f1aea66565824c229a2d065b31670e09fed6d7", size = 453013, upload-time = "2025-10-06T19:55:11.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374, upload-time = "2025-10-06T19:55:13.095Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956, upload-time = "2025-10-06T19:55:14.687Z" }, - { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154, upload-time = "2025-10-06T19:55:16.661Z" }, - { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707, upload-time = "2025-10-06T19:55:18.376Z" }, - { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404, upload-time = "2025-10-06T19:55:20.098Z" }, - { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519, upload-time = "2025-10-06T19:55:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904, upload-time = "2025-10-06T19:55:23.462Z" }, - { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043, upload-time = "2025-10-06T19:55:25.208Z" }, - { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765, upload-time = "2025-10-06T19:55:27.157Z" }, - { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737, upload-time = "2025-10-06T19:55:28.854Z" }, - { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052, upload-time = "2025-10-06T19:55:30.563Z" }, - { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532, upload-time = "2025-10-06T19:55:32.798Z" }, - { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072, upload-time = "2025-10-06T19:55:34.686Z" }, - { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613, upload-time = "2025-10-06T19:55:36.393Z" }, - { url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480, upload-time = "2025-10-06T19:55:38.043Z" }, - { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824, upload-time = "2025-10-06T19:55:39.595Z" }, - { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137, upload-time = "2025-10-06T19:55:41.617Z" }, { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585, upload-time = "2025-10-06T19:55:43.401Z" }, { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613, upload-time = "2025-10-06T19:55:45.237Z" }, { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750, upload-time = "2025-10-06T19:55:46.937Z" }, @@ -119,32 +78,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/e7/cc9f0fdf06cab3ca61e6b62bff9a4b978b8ca736e9d76ddf54365673ab19/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:582770f82513419512da096e8df21ca44f86a2e56e25dc93c5ab4df0fe065bf0", size = 1714933, upload-time = "2025-10-06T19:56:45.542Z" }, { url = "https://files.pythonhosted.org/packages/db/43/7abbe1de94748a58a71881163ee280fd3217db36e8344d109f63638fe16a/aiohttp-3.13.0-cp313-cp313-win32.whl", hash = "sha256:3194b8cab8dbc882f37c13ef1262e0a3d62064fa97533d3aa124771f7bf1ecee", size = 423799, upload-time = 
"2025-10-06T19:56:47.779Z" }, { url = "https://files.pythonhosted.org/packages/c9/58/afab7f2b9e7df88c995995172eb78cae8a3d5a62d5681abaade86b3f0089/aiohttp-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:7897298b3eedc790257fef8a6ec582ca04e9dbe568ba4a9a890913b925b8ea21", size = 450138, upload-time = "2025-10-06T19:56:49.49Z" }, - { url = "https://files.pythonhosted.org/packages/40/26/6625e909ea770910d9901cb04336d0280a5a578daa352434eba5e482a1eb/aiohttp-3.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fcc425fb6fd2a00c6d91c85d084c6b75a61bc8bc12159d08e17c5711df6c5ba4", size = 736970, upload-time = "2025-10-06T19:58:07.563Z" }, - { url = "https://files.pythonhosted.org/packages/b7/bc/ce07cc194f416f2d2539fec1237507b97496752f350738a051258be3dc08/aiohttp-3.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c2c4c9ce834801651f81d6760d0a51035b8b239f58f298de25162fcf6f8bb64", size = 493252, upload-time = "2025-10-06T19:58:10.146Z" }, - { url = "https://files.pythonhosted.org/packages/e1/70/f51f30694aa4f9de3fad5b254609c263bf873a36dfa36f0370b37acb6b18/aiohttp-3.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f91e8f9053a07177868e813656ec57599cd2a63238844393cd01bd69c2e40147", size = 488585, upload-time = "2025-10-06T19:58:12.438Z" }, - { url = "https://files.pythonhosted.org/packages/1f/89/4dd10ab758ba0dee025fc53345d24f5943f4d8741c1064ed3e92ca3274a8/aiohttp-3.13.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df46d9a3d78ec19b495b1107bf26e4fcf97c900279901f4f4819ac5bb2a02a4c", size = 1660915, upload-time = "2025-10-06T19:58:14.359Z" }, - { url = "https://files.pythonhosted.org/packages/49/72/f3a894182b625c7f242ab90ff95d6fbf4319d39dc854568a33c8768220ec/aiohttp-3.13.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3b1eb9871cbe43b6ca6fac3544682971539d8a1d229e6babe43446279679609d", size = 1625065, upload-time = "2025-10-06T19:58:16.754Z" }, - { url = "https://files.pythonhosted.org/packages/a5/3d/31f310c67b33e30d1f2fe11bf5aeb82d178189e8abfcf696bed69a9713cb/aiohttp-3.13.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:62a3cddf8d9a2eae1f79585fa81d32e13d0c509bb9e7ad47d33c83b45a944df7", size = 1721983, upload-time = "2025-10-06T19:58:19.275Z" }, - { url = "https://files.pythonhosted.org/packages/f1/6a/64f23ffd040409ac148a389be5c377f8edc4ab2aebdd601dbe786a30e05d/aiohttp-3.13.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0f735e680c323ee7e9ef8e2ea26425c7dbc2ede0086fa83ce9d7ccab8a089f26", size = 1810847, upload-time = "2025-10-06T19:58:21.419Z" }, - { url = "https://files.pythonhosted.org/packages/51/64/9951b85fdf53aea1e9f41c715e9f4fc3e60f430fb58e0a46bc7837688e39/aiohttp-3.13.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a51839f778b0e283b43cd82bb17f1835ee2cc1bf1101765e90ae886e53e751c", size = 1658909, upload-time = "2025-10-06T19:58:23.406Z" }, - { url = "https://files.pythonhosted.org/packages/ac/66/4d2c26a8a86b81a4493beb222457c6cba9f3853b6efa476a97edb4d52bb9/aiohttp-3.13.0-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac90cfab65bc281d6752f22db5fa90419e33220af4b4fa53b51f5948f414c0e7", size = 1551803, upload-time = "2025-10-06T19:58:25.568Z" }, - { url = "https://files.pythonhosted.org/packages/a6/9a/c6b4eb325f473f3a117fab90008348fc7094ccbaeeedf811d89738454996/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash 
= "sha256:62fd54f3e6f17976962ba67f911d62723c760a69d54f5d7b74c3ceb1a4e9ef8d", size = 1632205, upload-time = "2025-10-06T19:58:27.976Z" }, - { url = "https://files.pythonhosted.org/packages/55/27/6fa2a583d45f711cf2f719f0d69274739a94f3b6d3deddfd19106d2ef3b9/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cf2b60b65df05b6b2fa0d887f2189991a0dbf44a0dd18359001dc8fcdb7f1163", size = 1637447, upload-time = "2025-10-06T19:58:30.143Z" }, - { url = "https://files.pythonhosted.org/packages/b5/2a/ca6f2ac71b6c7e6f93ad42616e0a75660d8b9400977053126b7eaeaa5132/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1ccedfe280e804d9a9d7fe8b8c4309d28e364b77f40309c86596baa754af50b1", size = 1692038, upload-time = "2025-10-06T19:58:32.227Z" }, - { url = "https://files.pythonhosted.org/packages/4e/fa/221c09563a11ce0d22408bf41e4bd8385f2093a22ceb60eeddd1f5019bab/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ea01ffbe23df53ece0c8732d1585b3d6079bb8c9ee14f3745daf000051415a31", size = 1538478, upload-time = "2025-10-06T19:58:34.776Z" }, - { url = "https://files.pythonhosted.org/packages/fd/09/06b8b45874ba26d7b012b9b21b6593d9a7a0a4e3375da83c74eb044caafa/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:19ba8625fa69523627b67f7e9901b587a4952470f68814d79cdc5bc460e9b885", size = 1708317, upload-time = "2025-10-06T19:58:38.746Z" }, - { url = "https://files.pythonhosted.org/packages/63/b2/ee8c8bae8d2f17a146089bba8130cf18eb0e929d4c5262beae944da82b3a/aiohttp-3.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b14bfae90598d331b5061fd15a7c290ea0c15b34aeb1cf620464bb5ec02a602", size = 1647241, upload-time = "2025-10-06T19:58:41.233Z" }, - { url = "https://files.pythonhosted.org/packages/c4/e4/455240c0433ebc7c9400c6014d5e4d09509d0aeddb8c9d22aff1f3d40500/aiohttp-3.13.0-cp39-cp39-win32.whl", hash = "sha256:cf7a4b976da219e726d0043fc94ae8169c0dba1d3a059b3c1e2c964bafc5a77d", size = 430922, upload-time = "2025-10-06T19:58:43.699Z" }, - { url = "https://files.pythonhosted.org/packages/e6/e6/f49ed1869b961f41aa0421d35401dd4ee4c8ad74b3da618f7bde1a8ddc29/aiohttp-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b9697d15231aeaed4786f090c9c8bc3ab5f0e0a6da1e76c135a310def271020", size = 453958, upload-time = "2025-10-06T19:58:45.868Z" }, ] [[package]] name = "aioitertools" version = "0.12.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369, upload-time = "2024-09-02T03:33:40.349Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345, upload-time = "2024-09-02T03:34:59.454Z" }, @@ -225,7 +164,6 @@ name = "anyio" version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, @@ -284,11 +222,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2", size = 27180, upload-time = "2025-07-30T10:01:57.759Z" }, { url = "https://files.pythonhosted.org/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98", size = 31715, upload-time = "2025-07-30T10:01:58.56Z" }, { url = "https://files.pythonhosted.org/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94", size = 27149, upload-time = "2025-07-30T10:01:59.329Z" }, - { url = "https://files.pythonhosted.org/packages/11/2d/ba4e4ca8d149f8dcc0d952ac0967089e1d759c7e5fcf0865a317eb680fbb/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e", size = 24549, upload-time = "2025-07-30T10:02:00.101Z" }, - { url = "https://files.pythonhosted.org/packages/5c/82/9b2386cc75ac0bd3210e12a44bfc7fd1632065ed8b80d573036eecb10442/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d", size = 25539, upload-time = "2025-07-30T10:02:00.929Z" }, - { url = "https://files.pythonhosted.org/packages/31/db/740de99a37aa727623730c90d92c22c9e12585b3c98c54b7960f7810289f/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584", size = 28467, upload-time = "2025-07-30T10:02:02.08Z" }, - { url = "https://files.pythonhosted.org/packages/71/7a/47c4509ea18d755f44e2b92b7178914f0c113946d11e16e626df8eaa2b0b/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690", size = 27355, upload-time = "2025-07-30T10:02:02.867Z" }, - { url = "https://files.pythonhosted.org/packages/ee/82/82745642d3c46e7cea25e1885b014b033f4693346ce46b7f47483cf5d448/argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520", size = 29187, upload-time = "2025-07-30T10:02:03.674Z" }, ] [[package]] @@ -317,23 +250,11 @@ wheels = [ name = "async-lru" version = "2.0.5" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, ] -[[package]] -name = "async-timeout" -version = "5.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, -] - [[package]] name = "attrs" version = "25.4.0" @@ -361,7 +282,7 @@ name = "aws-requests-auth" version = "0.4.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "requests", marker = "python_full_version >= '3.10'" }, + { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/54/b2/455c0bfcbd772dafd4c9e93c4b713e36790abf9ccbca9b8e661968b29798/aws-requests-auth-0.4.3.tar.gz", hash = "sha256:33593372018b960a31dbbe236f89421678b885c35f0b6a7abfae35bb77e069b2", size = 10096, upload-time = "2020-05-27T23:10:34.742Z" } wheels = [ @@ -390,15 +311,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, ] -[[package]] -name = "backports-asyncio-runner" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, -] - [[package]] name = "beautifulsoup4" version = "4.14.2" @@ -417,13 +329,13 @@ name = "bedrock-agentcore" version = "0.1.4" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "boto3", marker = "python_full_version >= '3.10'" }, - { name = "botocore", marker = "python_full_version >= '3.10'" }, - { name = "pydantic", marker = "python_full_version >= '3.10'" }, - { name = "starlette", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, - { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "uvicorn", marker = "python_full_version >= '3.10'" }, + { name = "boto3" }, + { name = "botocore" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "urllib3" }, + { name = "uvicorn" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/23/be1672a79632a1c36b049fc29103732f6ca2dd8596ffcc3a59a1d6e59f4c/bedrock_agentcore-0.1.4.tar.gz", hash = "sha256:59c513df840ef66843915c7c229603732a3e7d7e76cb9d618be5663dbe1cc863", size = 229574, upload-time = "2025-09-17T18:29:23.327Z" } wheels = [ @@ -435,27 +347,15 @@ name = "black" version = "25.9.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "mypy-extensions" }, { name = "packaging" }, { name = "pathspec" }, - { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs" }, { name = "pytokens" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4b/43/20b5c90612d7bdb2bdbcceeb53d588acca3bb8f0e4c5d5c751a2c8fdd55a/black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619", size = 648393, upload-time = "2025-09-19T00:27:37.758Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/40/dbe31fc56b218a858c8fc6f5d8d3ba61c1fa7e989d43d4a4574b8b992840/black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7", size = 1715605, upload-time = "2025-09-19T00:36:13.483Z" }, - { url = "https://files.pythonhosted.org/packages/92/b2/f46800621200eab6479b1f4c0e3ede5b4c06b768e79ee228bc80270bcc74/black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92", size = 1571829, upload-time = "2025-09-19T00:32:42.13Z" }, - { url = "https://files.pythonhosted.org/packages/4e/64/5c7f66bd65af5c19b4ea86062bb585adc28d51d37babf70969e804dbd5c2/black-25.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96b6726d690c96c60ba682955199f8c39abc1ae0c3a494a9c62c0184049a713", size = 1631888, upload-time = "2025-09-19T00:30:54.212Z" }, - { url = "https://files.pythonhosted.org/packages/3b/64/0b9e5bfcf67db25a6eef6d9be6726499a8a72ebab3888c2de135190853d3/black-25.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d119957b37cc641596063cd7db2656c5be3752ac17877017b2ffcdb9dfc4d2b1", size = 1327056, upload-time = "2025-09-19T00:31:08.877Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f4/7531d4a336d2d4ac6cc101662184c8e7d068b548d35d874415ed9f4116ef/black-25.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:456386fe87bad41b806d53c062e2974615825c7a52159cde7ccaeb0695fa28fa", size = 1698727, upload-time = "2025-09-19T00:31:14.264Z" }, - { url = "https://files.pythonhosted.org/packages/28/f9/66f26bfbbf84b949cc77a41a43e138d83b109502cd9c52dfc94070ca51f2/black-25.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a16b14a44c1af60a210d8da28e108e13e75a284bf21a9afa6b4571f96ab8bb9d", size = 1555679, upload-time = "2025-09-19T00:31:29.265Z" }, - { url = "https://files.pythonhosted.org/packages/bf/59/61475115906052f415f518a648a9ac679d7afbc8da1c16f8fdf68a8cebed/black-25.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aaf319612536d502fdd0e88ce52d8f1352b2c0a955cc2798f79eeca9d3af0608", size = 1617453, upload-time = "2025-09-19T00:30:42.24Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/5b/20fd5c884d14550c911e4fb1b0dae00d4abb60a4f3876b449c4d3a9141d5/black-25.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:c0372a93e16b3954208417bfe448e09b0de5cc721d521866cd9e0acac3c04a1f", size = 1333655, upload-time = "2025-09-19T00:30:56.715Z" }, { url = "https://files.pythonhosted.org/packages/fb/8e/319cfe6c82f7e2d5bfb4d3353c6cc85b523d677ff59edc61fdb9ee275234/black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0", size = 1742012, upload-time = "2025-09-19T00:33:08.678Z" }, { url = "https://files.pythonhosted.org/packages/94/cc/f562fe5d0a40cd2a4e6ae3f685e4c36e365b1f7e494af99c26ff7f28117f/black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4", size = 1581421, upload-time = "2025-09-19T00:35:25.937Z" }, { url = "https://files.pythonhosted.org/packages/84/67/6db6dff1ebc8965fd7661498aea0da5d7301074b85bba8606a28f47ede4d/black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e", size = 1655619, upload-time = "2025-09-19T00:30:49.241Z" }, @@ -464,10 +364,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" }, { url = "https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" }, { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/0f724eb152bc9fc03029a9c903ddd77a288285042222a381050d27e64ac1/black-25.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef69351df3c84485a8beb6f7b8f9721e2009e20ef80a8d619e2d1788b7816d47", size = 1715243, upload-time = "2025-09-19T00:34:14.216Z" }, - { url = "https://files.pythonhosted.org/packages/fb/be/cb986ea2f0fabd0ee58668367724ba16c3a042842e9ebe009c139f8221c9/black-25.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c1f4cd5e93842774d9ee4ef6cd8d17790e65f44f7cdbaab5f2cf8ccf22a823", size = 1571246, upload-time = "2025-09-19T00:31:39.624Z" }, - { url = "https://files.pythonhosted.org/packages/82/ce/74cf4d66963fca33ab710e4c5817ceeff843c45649f61f41d88694c2e5db/black-25.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:154b06d618233fe468236ba1f0e40823d4eb08b26f5e9261526fde34916b9140", size = 1631265, upload-time = "2025-09-19T00:31:05.341Z" }, - { url = "https://files.pythonhosted.org/packages/ff/f3/9b11e001e84b4d1721f75e20b3c058854a748407e6fc1abe6da0aa22014f/black-25.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e593466de7b998374ea2585a471ba90553283fb9beefcfa430d84a2651ed5933", size = 1326615, upload-time = "2025-09-19T00:31:25.347Z" }, { url = 
"https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" }, ] @@ -509,8 +405,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, - { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6d/d0/9d64261186cff650fe63168441edb4f4cd33f085a74c0c54455630a71f91/botocore-1.39.11.tar.gz", hash = "sha256:953b12909d6799350e346ab038e55b6efe622c616f80aef74d7a6683ffdd972c", size = 14217749, upload-time = "2025-07-22T19:26:40.723Z" } wheels = [ @@ -535,31 +430,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, - { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, - { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, - { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, - { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, - { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, - { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, - { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, - { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, - { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, - { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, - { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, - { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, - { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, - { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, - { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, @@ -584,18 +454,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = 
"2025-09-08T23:23:14.32Z" }, { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, - { url = "https://files.pythonhosted.org/packages/c0/cc/08ed5a43f2996a16b462f64a7055c6e962803534924b9b2f1371d8c00b7b/cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf", size = 184288, upload-time = "2025-09-08T23:23:48.404Z" }, - { url = "https://files.pythonhosted.org/packages/3d/de/38d9726324e127f727b4ecc376bc85e505bfe61ef130eaf3f290c6847dd4/cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7", size = 180509, upload-time = "2025-09-08T23:23:49.73Z" }, - { url = "https://files.pythonhosted.org/packages/9b/13/c92e36358fbcc39cf0962e83223c9522154ee8630e1df7c0b3a39a8124e2/cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c", size = 208813, upload-time = "2025-09-08T23:23:51.263Z" }, - { url = "https://files.pythonhosted.org/packages/15/12/a7a79bd0df4c3bff744b2d7e52cc1b68d5e7e427b384252c42366dc1ecbc/cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165", size = 216498, upload-time = "2025-09-08T23:23:52.494Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/5c51c1c7600bdd7ed9a24a203ec255dccdd0ebf4527f7b922a0bde2fb6ed/cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534", size = 203243, upload-time = "2025-09-08T23:23:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/32/f2/81b63e288295928739d715d00952c8c6034cb6c6a516b17d37e0c8be5600/cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f", size = 203158, upload-time = "2025-09-08T23:23:55.169Z" }, - { url = "https://files.pythonhosted.org/packages/1f/74/cc4096ce66f5939042ae094e2e96f53426a979864aa1f96a621ad128be27/cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63", size = 216548, upload-time = "2025-09-08T23:23:56.506Z" }, - { url = "https://files.pythonhosted.org/packages/e8/be/f6424d1dc46b1091ffcc8964fa7c0ab0cd36839dd2761b49c90481a6ba1b/cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2", size = 218897, upload-time = "2025-09-08T23:23:57.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/e0/dda537c2309817edf60109e39265f24f24aa7f050767e22c98c53fe7f48b/cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65", size = 211249, upload-time = "2025-09-08T23:23:59.139Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/e7/7c769804eb75e4c4b35e658dba01de1640a351a9653c3d49ca89d16ccc91/cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322", size = 218041, upload-time = "2025-09-08T23:24:00.496Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d9/6218d78f920dcd7507fc16a766b5ef8f3b913cc7aa938e7fc80b9978d089/cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a", size = 172138, upload-time = "2025-09-08T23:24:01.7Z" }, - { url = "https://files.pythonhosted.org/packages/54/8f/a1e836f82d8e32a97e6b29cc8f641779181ac7363734f12df27db803ebda/cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9", size = 182794, upload-time = "2025-09-08T23:24:02.943Z" }, +] + +[[package]] +name = "chardet" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618, upload-time = "2023-08-01T19:23:02.662Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385, upload-time = "2023-08-01T19:23:00.661Z" }, ] [[package]] @@ -604,28 +471,6 @@ version = "3.4.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, - { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, - { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, - { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, - { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, - { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, - { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, - { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, - { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, - { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, - { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, - { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, - { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, - { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, - { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, - { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, @@ -648,46 +493,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ca/9a0983dd5c8e9733565cf3db4df2b0a2e9a82659fd8aa2a868ac6e4a991f/charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05", size = 207520, upload-time = "2025-08-09T07:57:11.026Z" }, - { url = "https://files.pythonhosted.org/packages/39/c6/99271dc37243a4f925b09090493fb96c9333d7992c6187f5cfe5312008d2/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e", size = 147307, upload-time = "2025-08-09T07:57:12.4Z" }, - { url = "https://files.pythonhosted.org/packages/e4/69/132eab043356bba06eb333cc2cc60c6340857d0a2e4ca6dc2b51312886b3/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99", size = 160448, upload-time = "2025-08-09T07:57:13.712Z" }, - { url = "https://files.pythonhosted.org/packages/04/9a/914d294daa4809c57667b77470533e65def9c0be1ef8b4c1183a99170e9d/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7", size = 157758, upload-time = "2025-08-09T07:57:14.979Z" }, - { url = "https://files.pythonhosted.org/packages/b0/a8/6f5bcf1bcf63cb45625f7c5cadca026121ff8a6c8a3256d8d8cd59302663/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7", size = 152487, upload-time = "2025-08-09T07:57:16.332Z" }, - { url = "https://files.pythonhosted.org/packages/c4/72/d3d0e9592f4e504f9dea08b8db270821c909558c353dc3b457ed2509f2fb/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19", size = 150054, upload-time = "2025-08-09T07:57:17.576Z" }, - { url = "https://files.pythonhosted.org/packages/20/30/5f64fe3981677fe63fa987b80e6c01042eb5ff653ff7cec1b7bd9268e54e/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312", size = 161703, upload-time = "2025-08-09T07:57:20.012Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/ef/dd08b2cac9284fd59e70f7d97382c33a3d0a926e45b15fc21b3308324ffd/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc", size = 159096, upload-time = "2025-08-09T07:57:21.329Z" }, - { url = "https://files.pythonhosted.org/packages/45/8c/dcef87cfc2b3f002a6478f38906f9040302c68aebe21468090e39cde1445/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34", size = 153852, upload-time = "2025-08-09T07:57:22.608Z" }, - { url = "https://files.pythonhosted.org/packages/63/86/9cbd533bd37883d467fcd1bd491b3547a3532d0fbb46de2b99feeebf185e/charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432", size = 99840, upload-time = "2025-08-09T07:57:23.883Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d6/7e805c8e5c46ff9729c49950acc4ee0aeb55efb8b3a56687658ad10c3216/charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca", size = 107438, upload-time = "2025-08-09T07:57:25.287Z" }, { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, -] - [[package]] name = "click" version = "8.3.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ @@ -718,31 +532,6 @@ version = "7.10.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, - { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, - { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, - { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, - { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, - { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, - { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, - { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, - { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = 
"2025-09-21T20:01:13.459Z" }, - { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, - { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, - { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, - { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, - { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, - { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, - { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, - { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", 
size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, - { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, - { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, @@ -782,33 +571,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, - { url = "https://files.pythonhosted.org/packages/a3/ad/d1c25053764b4c42eb294aae92ab617d2e4f803397f9c7c8295caa77a260/coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3", size = 217978, upload-time = "2025-09-21T20:03:30.362Z" }, - { url = "https://files.pythonhosted.org/packages/52/2f/b9f9daa39b80ece0b9548bbb723381e29bc664822d9a12c2135f8922c22b/coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c", size = 218370, upload-time = "2025-09-21T20:03:32.147Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/6e/30d006c3b469e58449650642383dddf1c8fb63d44fdf92994bfd46570695/coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396", size = 244802, upload-time = "2025-09-21T20:03:33.919Z" }, - { url = "https://files.pythonhosted.org/packages/b0/49/8a070782ce7e6b94ff6a0b6d7c65ba6bc3091d92a92cef4cd4eb0767965c/coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40", size = 246625, upload-time = "2025-09-21T20:03:36.09Z" }, - { url = "https://files.pythonhosted.org/packages/6a/92/1c1c5a9e8677ce56d42b97bdaca337b2d4d9ebe703d8c174ede52dbabd5f/coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594", size = 248399, upload-time = "2025-09-21T20:03:38.342Z" }, - { url = "https://files.pythonhosted.org/packages/c0/54/b140edee7257e815de7426d5d9846b58505dffc29795fff2dfb7f8a1c5a0/coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a", size = 245142, upload-time = "2025-09-21T20:03:40.591Z" }, - { url = "https://files.pythonhosted.org/packages/e4/9e/6d6b8295940b118e8b7083b29226c71f6154f7ff41e9ca431f03de2eac0d/coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b", size = 246284, upload-time = "2025-09-21T20:03:42.355Z" }, - { url = "https://files.pythonhosted.org/packages/db/e5/5e957ca747d43dbe4d9714358375c7546cb3cb533007b6813fc20fce37ad/coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3", size = 244353, upload-time = "2025-09-21T20:03:44.218Z" }, - { url = "https://files.pythonhosted.org/packages/9a/45/540fc5cc92536a1b783b7ef99450bd55a4b3af234aae35a18a339973ce30/coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0", size = 244430, upload-time = "2025-09-21T20:03:46.065Z" }, - { url = "https://files.pythonhosted.org/packages/75/0b/8287b2e5b38c8fe15d7e3398849bb58d382aedc0864ea0fa1820e8630491/coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f", size = 245311, upload-time = "2025-09-21T20:03:48.19Z" }, - { url = "https://files.pythonhosted.org/packages/0c/1d/29724999984740f0c86d03e6420b942439bf5bd7f54d4382cae386a9d1e9/coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431", size = 220500, upload-time = "2025-09-21T20:03:50.024Z" }, - { url = "https://files.pythonhosted.org/packages/43/11/4b1e6b129943f905ca54c339f343877b55b365ae2558806c1be4f7476ed5/coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07", size = 221408, upload-time = "2025-09-21T20:03:51.803Z" }, { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, ] -[package.optional-dependencies] 
-toml = [ - { name = "tomli", marker = "python_full_version <= '3.11'" }, -] - [[package]] name = "cryptography" version = "46.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } wheels = [ @@ -842,14 +613,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, - { url = "https://files.pythonhosted.org/packages/25/b2/067a7db693488f19777ecf73f925bcb6a3efa2eae42355bafaafa37a6588/cryptography-46.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f25a41f5b34b371a06dad3f01799706631331adc7d6c05253f5bca22068c7a34", size = 3701860, upload-time = "2025-10-01T00:28:53.003Z" }, - { url = "https://files.pythonhosted.org/packages/87/12/47c2aab2c285f97c71a791169529dbb89f48fc12e5f62bb6525c3927a1a2/cryptography-46.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e12b61e0b86611e3f4c1756686d9086c1d36e6fd15326f5658112ad1f1cc8807", size = 3429917, upload-time = "2025-10-01T00:28:55.03Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" }, - { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" }, - { url = "https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" }, - { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" }, ] [[package]] @@ -866,7 +629,6 @@ dependencies = [ { name = "packaging" }, { name = "pydantic" }, { name = "pyyaml" }, - { name = "tomli", marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/70/e1/dbf7c2edb1b1db1f4fd472ee92f985ec97d58902512013d9c4584108329c/datamodel_code_generator-0.35.0.tar.gz", hash = "sha256:46805fa2515d3871f6bfafce9aa63128e735a7a6a4cfcbf9c27b3794ee4ea846", size = 459915, upload-time = "2025-10-09T19:26:49.837Z" } wheels = [ @@ -879,14 +641,6 @@ version = "1.8.17" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129, upload-time = "2025-09-17T16:33:20.633Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/36/b57c6e818d909f6e59c0182252921cf435e0951126a97e11de37e72ab5e1/debugpy-1.8.17-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:c41d2ce8bbaddcc0009cc73f65318eedfa3dbc88a8298081deb05389f1ab5542", size = 2098021, upload-time = "2025-09-17T16:33:22.556Z" }, - { url = "https://files.pythonhosted.org/packages/be/01/0363c7efdd1e9febd090bb13cee4fb1057215b157b2979a4ca5ccb678217/debugpy-1.8.17-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:1440fd514e1b815edd5861ca394786f90eb24960eb26d6f7200994333b1d79e3", size = 3087399, upload-time = "2025-09-17T16:33:24.292Z" }, - { url = "https://files.pythonhosted.org/packages/79/bc/4a984729674aa9a84856650438b9665f9a1d5a748804ac6f37932ce0d4aa/debugpy-1.8.17-cp310-cp310-win32.whl", hash = "sha256:3a32c0af575749083d7492dc79f6ab69f21b2d2ad4cd977a958a07d5865316e4", size = 5230292, upload-time = "2025-09-17T16:33:26.137Z" }, - { url = "https://files.pythonhosted.org/packages/5d/19/2b9b3092d0cf81a5aa10c86271999453030af354d1a5a7d6e34c574515d7/debugpy-1.8.17-cp310-cp310-win_amd64.whl", hash = "sha256:a3aad0537cf4d9c1996434be68c6c9a6d233ac6f76c2a482c7803295b4e4f99a", size = 5261885, upload-time = "2025-09-17T16:33:27.592Z" }, - { url = "https://files.pythonhosted.org/packages/d8/53/3af72b5c159278c4a0cf4cffa518675a0e73bdb7d1cac0239b815502d2ce/debugpy-1.8.17-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840", size = 2207154, upload-time = "2025-09-17T16:33:29.457Z" }, - { url = "https://files.pythonhosted.org/packages/8f/6d/204f407df45600e2245b4a39860ed4ba32552330a0b3f5f160ae4cc30072/debugpy-1.8.17-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f", size = 3170322, upload-time = "2025-09-17T16:33:30.837Z" }, - { url = "https://files.pythonhosted.org/packages/f2/13/1b8f87d39cf83c6b713de2620c31205299e6065622e7dd37aff4808dd410/debugpy-1.8.17-cp311-cp311-win32.whl", hash = 
"sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da", size = 5155078, upload-time = "2025-09-17T16:33:33.331Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c5/c012c60a2922cc91caa9675d0ddfbb14ba59e1e36228355f41cab6483469/debugpy-1.8.17-cp311-cp311-win_amd64.whl", hash = "sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4", size = 5179011, upload-time = "2025-09-17T16:33:35.711Z" }, { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522, upload-time = "2025-09-17T16:33:38.466Z" }, { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417, upload-time = "2025-09-17T16:33:41.299Z" }, { url = "https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130, upload-time = "2025-09-17T16:33:43.554Z" }, @@ -895,10 +649,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/60/ce5c34fcdfec493701f9d1532dba95b21b2f6394147234dce21160bd923f/debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088", size = 4292100, upload-time = "2025-09-17T16:33:56.353Z" }, { url = "https://files.pythonhosted.org/packages/e8/95/7873cf2146577ef71d2a20bf553f12df865922a6f87b9e8ee1df04f01785/debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83", size = 5277002, upload-time = "2025-09-17T16:33:58.231Z" }, { url = "https://files.pythonhosted.org/packages/46/11/18c79a1cee5ff539a94ec4aa290c1c069a5580fd5cfd2fb2e282f8e905da/debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420", size = 5319047, upload-time = "2025-09-17T16:34:00.586Z" }, - { url = "https://files.pythonhosted.org/packages/16/ee/0e9a08878f1b525f85c4e47723ea1f17b1bad69672c84fa910210604e3f8/debugpy-1.8.17-cp39-cp39-macosx_15_0_x86_64.whl", hash = "sha256:f2ac8055a0c4a09b30b931100996ba49ef334c6947e7ae365cdd870416d7513e", size = 2099309, upload-time = "2025-09-17T16:34:17.935Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b5/0327b27efd8826ca92a256a3a250e80ccad6a834b4d12bd9cbd491f2da03/debugpy-1.8.17-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:eaa85bce251feca8e4c87ce3b954aba84b8c645b90f0e6a515c00394a9f5c0e7", size = 3080100, upload-time = "2025-09-17T16:34:19.885Z" }, - { url = "https://files.pythonhosted.org/packages/0f/f0/2e210fa8884d2ab452fa31ffd1402e13010eaacfa67063d0565d97ac9e0e/debugpy-1.8.17-cp39-cp39-win32.whl", hash = "sha256:b13eea5587e44f27f6c48588b5ad56dcb74a4f3a5f89250443c94587f3eb2ea1", size = 5231016, upload-time = "2025-09-17T16:34:21.887Z" }, - { url = "https://files.pythonhosted.org/packages/d6/9b/6a45fb1553d09b618c9441bcbbf72b651246b83b5618b2f95c0e4cf1b8bd/debugpy-1.8.17-cp39-cp39-win_amd64.whl", hash = "sha256:bb1bbf92317e1f35afcf3ef0450219efb3afe00be79d8664b250ac0933b9015f", size = 5262778, upload-time = "2025-09-17T16:34:24.026Z" }, { url = 
"https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210, upload-time = "2025-09-17T16:34:25.835Z" }, ] @@ -941,27 +691,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = "2025-04-16T00:41:47.671Z" }, ] -[[package]] -name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, -] - [[package]] name = "dnspython" version = "2.8.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, @@ -982,28 +715,6 @@ version = "0.8.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d5/18/9f4f975ca87a390832b1c22478f3702fcdf739f83211e24d054b7551270d/editdistance-0.8.1.tar.gz", hash = "sha256:d1cdf80a5d5014b0c9126a69a42ce55a457b457f6986ff69ca98e4fe4d2d8fed", size = 50006, upload-time = "2024-02-10T07:44:53.914Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/c9/302658ce7f4c537a4e85cf578d11bbf7af120a712e1d78fedc6cb8823c65/editdistance-0.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:adeb705f32b93accc74960d227875abff150ee42d676e428536361fe5f8f5388", size = 106150, upload-time = "2024-02-10T07:43:15.903Z" }, - { url = "https://files.pythonhosted.org/packages/45/80/0b3c7d2c0e183725986fea5dd2df11f0b4b46320e9a64f6077a121ab1f64/editdistance-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3de77951b105d0972deec7684a0b3d1a9dee69c9b5d34f6e2acc0d76cd4a1c52", size = 80551, upload-time = "2024-02-10T07:43:17.64Z" }, - { url = "https://files.pythonhosted.org/packages/b5/14/681460965c6a4a48321b07f88de2273d097fdca0491ff55db891aacbd291/editdistance-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e88efb052d45e924606c305cb833a80579dca3e8e4ff01309d50ba2c1c0bbd5", size = 79142, upload-time = "2024-02-10T07:43:19.195Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/0d/abdbc8e394a9461cf2ae27c16564fadaa65f52bd242dd1582ae5e7736dc3/editdistance-0.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0247e7a1e9c66ea75211a97e725366bff19a52aac2c838ed5f90025630e976dd", size = 396768, upload-time = "2024-02-10T07:43:20.912Z" }, - { url = "https://files.pythonhosted.org/packages/c2/fb/2940d26ebda12efd280ae939436f17ac482930d862df9e774cb8b771ab03/editdistance-0.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67d143429a49ab552411505f550a0fb4285a1d4336e096804d233ec495ac20fc", size = 401846, upload-time = "2024-02-10T07:43:23.169Z" }, - { url = "https://files.pythonhosted.org/packages/53/cc/c63d75c7f387d4df0645682c1ab8706c2dfe5c9c0c4999723ce9a3ba0853/editdistance-0.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca9d3be2b10e5d44a950a4bd1e84bca9ebbecd364bce0cf5693bf8224c78eaef", size = 397543, upload-time = "2024-02-10T07:43:24.621Z" }, - { url = "https://files.pythonhosted.org/packages/8e/38/bb0f734a7571e093184606b930734b12da5b6bff2635eba9312fe4536dd9/editdistance-0.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5c72aa1df8535f2e2b3d8773a1a7da091bc1a7e52bb396e7e48d375ba687e7b2", size = 898934, upload-time = "2024-02-10T07:43:26.926Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9f/624fc7a09918f850a057465f02e86f269e139a457f48ff8cabfb12701756/editdistance-0.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a606c34a2a6cc190e4fffc856b36333cdcf1f1fab5b22bd3088e585c22d6ca0", size = 959637, upload-time = "2024-02-10T07:43:28.997Z" }, - { url = "https://files.pythonhosted.org/packages/5e/5c/7fa6cc277f91c477ee370807d51c1826891dc6dfc307544223ce7f2687de/editdistance-0.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5af173d442ffac33b7c7990132f97f88818a3abf4b21c0c702a7022df37c0c5c", size = 911024, upload-time = "2024-02-10T07:43:30.449Z" }, - { url = "https://files.pythonhosted.org/packages/ad/97/556215f71184291155aee340a6d34f0676e7238fdfd10615b6b775ce25fe/editdistance-0.8.1-cp310-cp310-win32.whl", hash = "sha256:fd64b58f5a7b59afd9d75982aaeeacd2a98498bf472fa0360c122ffe6ea4c871", size = 80834, upload-time = "2024-02-10T07:43:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d1/7ec5f5cbb95838d0eff7f980a660c81acd1363d658f2f5d4ceba38877c5a/editdistance-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:6c7c62c3cae45ca1fa01bb2722b297b9de1e3a244ac44cfba88bdcb488fe6aee", size = 79614, upload-time = "2024-02-10T07:43:33.255Z" }, - { url = "https://files.pythonhosted.org/packages/e2/dc/d0c29fd52d8f9e795653ed2b838a2a48c739cdfff04ac5b79c6c0ecbdf79/editdistance-0.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:486105603a273d73d12a54f347dffa70ab281749d7c3879658b377bc49e4b98c", size = 106079, upload-time = "2024-02-10T07:43:34.34Z" }, - { url = "https://files.pythonhosted.org/packages/b4/c6/75fa45d7b78fbea6fd894f4e48895a75bd3c83d4a9a6b57673881d74d3e0/editdistance-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fad081f5f86a175c1a09a4e9e45b95c9349e454c21e181e842e01c85f1f536fc", size = 80580, upload-time = "2024-02-10T07:43:35.947Z" }, - { url = "https://files.pythonhosted.org/packages/b7/a3/058d823b6285c3511dc94ed80620c3fb0c18b4aaa708f70ba71f3af28436/editdistance-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cb78e125f6759398885a775f5eed07c2bb72b2f86da43e674c6b6a3335b273b", size = 79087, upload-time = "2024-02-10T07:43:36.923Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/3a/0b13c7864c93b1e9b9952bd2a33c5ef3c4fd1bf70a5fad6924789e70e5eb/editdistance-0.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3778ca60aa89def9144b70e330bcec5330c7da1d69cb28c612e90b84510a1d3d", size = 409296, upload-time = "2024-02-10T07:43:38.52Z" }, - { url = "https://files.pythonhosted.org/packages/96/8a/db0fd79e8ddb9b5f86f274107c5d0a27ec4f2af88877df1f26c2c6d150cc/editdistance-0.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fba945eaa0436cf40bc53d7e299dc537c7c71353379a095b7459ff4af910da33", size = 412913, upload-time = "2024-02-10T07:43:39.852Z" }, - { url = "https://files.pythonhosted.org/packages/0d/d2/98be7112750ff17b436dd76f988f1e38570dcec0df8578ee19ef046f22fe/editdistance-0.8.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:877f2a0d801f32bc1a1878901ffb947b974361e849c66e314a7f1d786a446b58", size = 407430, upload-time = "2024-02-10T07:43:41.048Z" }, - { url = "https://files.pythonhosted.org/packages/03/62/1815e3bf164910c47ba1948c8b5e937a40c7f9763b64e98fb6666b01dd06/editdistance-0.8.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e79d351ca40a6ead5f3763253fd7521572ee0d3e5d42538630e56d10f48db481", size = 909217, upload-time = "2024-02-10T07:43:42.916Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d3/a832cea7b507a9be54e4ac3d1340fb66dca5f9c16c70bf38d5039e8fdede/editdistance-0.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:70ed382b3052a51161bad0149d4665003bf3b949fce0b01bf1253a4cc1a88239", size = 969407, upload-time = "2024-02-10T07:43:44.912Z" }, - { url = "https://files.pythonhosted.org/packages/a3/b4/db291d2a3845cbf8047b4b5aad3b3e038a8a2994d87027b40e1a1b0f4b74/editdistance-0.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a529bfb384c4000775d76739c4e64f73337f0f5a3784933b1321b577a62bed4e", size = 922112, upload-time = "2024-02-10T07:43:47.047Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/7ddeacada4982d0b892a28897e21871d0f25bca165e3663e37c3a272808a/editdistance-0.8.1-cp311-cp311-win32.whl", hash = "sha256:b082232429e731f181af7f7d2bcf79da6ca8fadd04e9086c11e2973f7d330c81", size = 80799, upload-time = "2024-02-10T07:43:48.231Z" }, - { url = "https://files.pythonhosted.org/packages/52/a1/778af8590b8b12f03f62eacc3c8744407ade9e3d69be6dabe38d0afbf2dd/editdistance-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:cef1a4359252a49f2c4718e64e9d40027d9d951b289d045bdb278656e59f6af8", size = 79698, upload-time = "2024-02-10T07:43:49.234Z" }, { url = "https://files.pythonhosted.org/packages/cb/4c/7f195588949b4e72436dc7fc902632381f96e586af829685b56daebb38b8/editdistance-0.8.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04af61b3fcdd287a07c15b6ae3b02af01c5e3e9c3aca76b8c1d13bd266b6f57", size = 106723, upload-time = "2024-02-10T07:43:50.268Z" }, { url = "https://files.pythonhosted.org/packages/8d/82/31dc1640d830cd7d36865098329f34e4dad3b77f31cfb9404b347e700196/editdistance-0.8.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:18fc8b6eaae01bfd9cf999af726c1e8dcf667d120e81aa7dbd515bea7427f62f", size = 80998, upload-time = "2024-02-10T07:43:51.259Z" }, { url = "https://files.pythonhosted.org/packages/ea/2a/6b823e71cef694d6f070a1d82be2842706fa193541aab8856a8f42044cd0/editdistance-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a87839450a5987028738d061ffa5ef6a68bac2ddc68c9147a8aae9806629c7f", size = 79248, upload-time = "2024-02-10T07:43:52.873Z" }, @@ -1015,27 +726,6 
@@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/3d/9877566e724c8a37f2228a84ec5cbf66dbfd0673515baf68a0fe07caff40/editdistance-0.8.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e16721636da6d6b68a2c09eaced35a94f4a4a704ec09f45756d4fd5e128ed18d", size = 929121, upload-time = "2024-02-10T07:44:02.764Z" }, { url = "https://files.pythonhosted.org/packages/d2/f5/8c50757d198b8ca30ddb91e8b8f0247a8dca04ff2ec30755245f0ab1ff0c/editdistance-0.8.1-cp312-cp312-win32.whl", hash = "sha256:87533cf2ebc3777088d991947274cd7e1014b9c861a8aa65257bcdc0ee492526", size = 81039, upload-time = "2024-02-10T07:44:04.134Z" }, { url = "https://files.pythonhosted.org/packages/28/f0/65101e51dc7c850e7b7581a5d8fa8721a1d7479a0dca6c08386328e19882/editdistance-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:09f01ed51746d90178af7dd7ea4ebb41497ef19f53c7f327e864421743dffb0a", size = 79853, upload-time = "2024-02-10T07:44:05.687Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c3/4a18329c746cf7f35e02afff502b29581c298345292e70cfa9366e7d2ac7/editdistance-0.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4d8e9a3e65a68c13dcadc1d2caca620f1716a8d02f2602047e0721b509161ec7", size = 106139, upload-time = "2024-02-10T07:44:22.877Z" }, - { url = "https://files.pythonhosted.org/packages/73/81/de1fa8018b5d3ee7c622f3f19651967a9e107f4faaf0f172d2fa5a81f725/editdistance-0.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7994a6a0a6ae92db87c144e12f1549ca0e50f43c6cc64e32c628e7af6b9c74b6", size = 80544, upload-time = "2024-02-10T07:44:23.908Z" }, - { url = "https://files.pythonhosted.org/packages/1c/87/2625cfae8b83c68bf19b0db93350f15f114d92b5c592a6dbfedb8c8e2344/editdistance-0.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dbe0cbc15466e9b7fbf73e34bdcae11cb0c2acd09a60ef4740f2172f9aa5e751", size = 79131, upload-time = "2024-02-10T07:44:24.904Z" }, - { url = "https://files.pythonhosted.org/packages/2e/94/b3f68b42fe96ee86e5deaf375bffda0a33d0aea3c183e12648b329e70e11/editdistance-0.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc5f0c7f12a3a3bf2d129e2900deaaa5e47203ef61918343ddc4b6c03e50f089", size = 396599, upload-time = "2024-02-10T07:44:25.98Z" }, - { url = "https://files.pythonhosted.org/packages/8f/b1/c8634e2dddb7ea14d99b9e7a3988124b2da08a33343eb9290d908451a3de/editdistance-0.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98572c662fd7d425ff24acb8197ad4be7849558a48aebbc60012090bfda4dce9", size = 401643, upload-time = "2024-02-10T07:44:27.403Z" }, - { url = "https://files.pythonhosted.org/packages/9d/03/5f7567e9502f262d4c8ef62baa8ce20b0e9de8dc2aabe674152144b35d71/editdistance-0.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b35c647a8a17b77441e7b6111b74ae1016851589109e1efc990d27225b3217b", size = 397311, upload-time = "2024-02-10T07:44:28.621Z" }, - { url = "https://files.pythonhosted.org/packages/9a/55/d25497069197e4f5f0a961003d91e7396a5ba088ac4a2efb94e6b8941601/editdistance-0.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f56c0e006f6b5207985c1bdd62e1873e66bb06a60849cad32716cad1bb3ae40", size = 898633, upload-time = "2024-02-10T07:44:30.782Z" }, - { url = "https://files.pythonhosted.org/packages/cb/4e/fb19a6d8b8434dccfe32581cf01bdf2e4b901b00bf50784097a64dd02530/editdistance-0.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d6bc5a827b262dc9b0d03cfd821682334ce1280520edf6385dc1730e390b5201", size = 959353, upload-time = "2024-02-10T07:44:32.133Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/4c/f515792d582489d2953c5b19bfca6818a6036f8c2e9d4eda4546f854d3cc/editdistance-0.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ad68a2357664e45823b38c9f67a315ff9771263ec502a710057b78c6ca6fcfcd", size = 910776, upload-time = "2024-02-10T07:44:33.579Z" }, - { url = "https://files.pythonhosted.org/packages/60/4d/a7ee5093a27e312800dadfa90de6c129d5ed5e7ae63e9bfe211db64ead63/editdistance-0.8.1-cp39-cp39-win32.whl", hash = "sha256:16b3e413c020e42b2ef2d4ba01386ead43007217f0bdd704e90474ace90d2023", size = 80828, upload-time = "2024-02-10T07:44:34.81Z" }, - { url = "https://files.pythonhosted.org/packages/0d/17/8c9dacbee53f4fc4ebfd25b0821c493aed6da547e457047f35c7430469c2/editdistance-0.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:331f1a8d3a753858a9d689c0bcd79ad1959e0df464bb6c22cb263cfb6da208e4", size = 79613, upload-time = "2024-02-10T07:44:35.784Z" }, - { url = "https://files.pythonhosted.org/packages/d4/4c/c9d02eeb47815d35f8d324b52f6704ea7beb032bcb209358cac44047d413/editdistance-0.8.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a4a90c6b03094c07358572027a8d0a13cca7450b1aa6caca98a5f1fa4f0b8961", size = 76455, upload-time = "2024-02-10T07:44:36.838Z" }, - { url = "https://files.pythonhosted.org/packages/af/b0/2818fa6a24595dac069b0bfb9d05658406779a1ded8fd2b0c9066396cf99/editdistance-0.8.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510a4f9ced348a4fd89ae2e102357d4d801a771e29bb2bc2f130a1692193407f", size = 84104, upload-time = "2024-02-10T07:44:37.928Z" }, - { url = "https://files.pythonhosted.org/packages/1f/d1/3d5e09bcf7fdb7aed705bf74047a8634bd2b8fd92177c25a2547e6dbadfb/editdistance-0.8.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4787fa7228ba6a34b430066d174320f011d605015baa7299c2c4911e6ea6bd46", size = 89058, upload-time = "2024-02-10T07:44:39.113Z" }, - { url = "https://files.pythonhosted.org/packages/cd/88/fca5d7b1a1edf66ce1e5b6b60bff75842e6814b4f5facbdf4585d88c912d/editdistance-0.8.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee02601375073afccd6b4d811129ce1cb696d47db734784d8dbd1fddcea75447", size = 84635, upload-time = "2024-02-10T07:44:40.714Z" }, - { url = "https://files.pythonhosted.org/packages/a9/91/0e6285bbe2358d81fd16313d30306b2d0036387348f7bc11d8c076ca3c72/editdistance-0.8.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bc7ad9f9a20e6f351523de77c59249f005242e3f317b5de45d02c378d24f6531", size = 77389, upload-time = "2024-02-10T07:44:41.725Z" }, - { url = "https://files.pythonhosted.org/packages/f1/19/655a7e06fb5d2b41c0f2838f27c2b1b804389fa0d42fa226eb04958bfb27/editdistance-0.8.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8131acb6b5170382b8b74efab92df8739ac591dc841314e0153af63c4493cb43", size = 76450, upload-time = "2024-02-10T07:44:48.499Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e5/45f8f204a43427e14453250fbd68c6ec8fcd4e4ea664eb3c5acd09856e64/editdistance-0.8.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f182e5e1d2a446138cab085409395c62af36eb1abcbe8cfacb083febfeafd5ce", size = 84108, upload-time = "2024-02-10T07:44:49.542Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ac/890a9abd8487254042a67ec9bd9f3dd64c3bd8f34c0b4015db0c4ad93890/editdistance-0.8.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7f4f19a829aff230377041acb77afec73becbebafe35b7e322be00cdb3122ddb", size = 89054, upload-time = "2024-02-10T07:44:50.736Z" }, - { url = "https://files.pythonhosted.org/packages/91/cf/3130100071e1447c5607abe050986320617e8f6157856b101f5caa25b453/editdistance-0.8.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b978c5927100a57791131dd2418040f4e5d33970d37b97a84c1a530ec481f557", size = 84637, upload-time = "2024-02-10T07:44:51.735Z" }, - { url = "https://files.pythonhosted.org/packages/98/c5/2ab098d1f2b7ecc1e7dfdd534be7af615732c54b46e7ca2e53a908ad457b/editdistance-0.8.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c96a8e981f385f0b7392d047c5caab8e0b24f94b71120787fd78241efc34237", size = 77383, upload-time = "2024-02-10T07:44:52.73Z" }, ] [[package]] @@ -1043,8 +733,7 @@ name = "email-validator" version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "dnspython", version = "2.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "dnspython", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "dnspython" }, { name = "idna" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } @@ -1061,18 +750,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, ] -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, -] - [[package]] name = "execnet" version = "2.1.1" @@ -1115,38 +792,6 @@ version = "1.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/4a/557715d5047da48d54e659203b9335be7bfaafda2c3f627b7c47e0b3aaf3/frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011", size = 86230, upload-time = "2025-10-06T05:35:23.699Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565", size = 49621, upload-time = "2025-10-06T05:35:25.341Z" }, - { url = "https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad", size = 49889, upload-time = "2025-10-06T05:35:26.797Z" }, - { url = "https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2", size = 219464, upload-time = "2025-10-06T05:35:28.254Z" }, - { url = "https://files.pythonhosted.org/packages/6b/83/4d587dccbfca74cb8b810472392ad62bfa100bf8108c7223eb4c4fa2f7b3/frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186", size = 221649, upload-time = "2025-10-06T05:35:29.454Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c6/fd3b9cd046ec5fff9dab66831083bc2077006a874a2d3d9247dea93ddf7e/frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e", size = 219188, upload-time = "2025-10-06T05:35:30.951Z" }, - { url = "https://files.pythonhosted.org/packages/ce/80/6693f55eb2e085fc8afb28cf611448fb5b90e98e068fa1d1b8d8e66e5c7d/frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450", size = 231748, upload-time = "2025-10-06T05:35:32.101Z" }, - { url = "https://files.pythonhosted.org/packages/97/d6/e9459f7c5183854abd989ba384fe0cc1a0fb795a83c033f0571ec5933ca4/frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef", size = 236351, upload-time = "2025-10-06T05:35:33.834Z" }, - { url = "https://files.pythonhosted.org/packages/97/92/24e97474b65c0262e9ecd076e826bfd1d3074adcc165a256e42e7b8a7249/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4", size = 218767, upload-time = "2025-10-06T05:35:35.205Z" }, - { url = "https://files.pythonhosted.org/packages/ee/bf/dc394a097508f15abff383c5108cb8ad880d1f64a725ed3b90d5c2fbf0bb/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff", size = 235887, upload-time = "2025-10-06T05:35:36.354Z" }, - { url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c", size = 228785, upload-time = "2025-10-06T05:35:37.949Z" }, - { url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f", size = 230312, upload-time = "2025-10-06T05:35:39.178Z" }, - { url = "https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7", size = 217650, upload-time = "2025-10-06T05:35:40.377Z" }, - { url = "https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a", size = 39659, upload-time = "2025-10-06T05:35:41.863Z" }, - { url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6", size = 43837, upload-time = "2025-10-06T05:35:43.205Z" }, - { url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e", size = 39989, upload-time = "2025-10-06T05:35:44.596Z" }, - { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, - { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, - { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, - { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, - { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, - { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, - { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, - { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, - { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, - { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, - { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, - { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, - { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, { url = 
"https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, @@ -1195,22 +840,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, - { url = "https://files.pythonhosted.org/packages/c2/59/ae5cdac87a00962122ea37bb346d41b66aec05f9ce328fa2b9e216f8967b/frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47", size = 86967, upload-time = "2025-10-06T05:37:55.607Z" }, - { url = "https://files.pythonhosted.org/packages/8a/10/17059b2db5a032fd9323c41c39e9d1f5f9d0c8f04d1e4e3e788573086e61/frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca", size = 49984, upload-time = "2025-10-06T05:37:57.049Z" }, - { url = "https://files.pythonhosted.org/packages/4b/de/ad9d82ca8e5fa8f0c636e64606553c79e2b859ad253030b62a21fe9986f5/frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068", size = 50240, upload-time = "2025-10-06T05:37:58.145Z" }, - { url = "https://files.pythonhosted.org/packages/4e/45/3dfb7767c2a67d123650122b62ce13c731b6c745bc14424eea67678b508c/frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95", size = 219472, upload-time = "2025-10-06T05:37:59.239Z" }, - { url = "https://files.pythonhosted.org/packages/0b/bf/5bf23d913a741b960d5c1dac7c1985d8a2a1d015772b2d18ea168b08e7ff/frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459", size = 221531, upload-time = "2025-10-06T05:38:00.521Z" }, - { url = "https://files.pythonhosted.org/packages/d0/03/27ec393f3b55860859f4b74cdc8c2a4af3dbf3533305e8eacf48a4fd9a54/frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675", size = 219211, upload-time = "2025-10-06T05:38:01.842Z" }, - { 
url = "https://files.pythonhosted.org/packages/3a/ad/0fd00c404fa73fe9b169429e9a972d5ed807973c40ab6b3cf9365a33d360/frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61", size = 231775, upload-time = "2025-10-06T05:38:03.384Z" }, - { url = "https://files.pythonhosted.org/packages/8a/c3/86962566154cb4d2995358bc8331bfc4ea19d07db1a96f64935a1607f2b6/frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6", size = 236631, upload-time = "2025-10-06T05:38:04.609Z" }, - { url = "https://files.pythonhosted.org/packages/ea/9e/6ffad161dbd83782d2c66dc4d378a9103b31770cb1e67febf43aea42d202/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5", size = 218632, upload-time = "2025-10-06T05:38:05.917Z" }, - { url = "https://files.pythonhosted.org/packages/58/b2/4677eee46e0a97f9b30735e6ad0bf6aba3e497986066eb68807ac85cf60f/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3", size = 235967, upload-time = "2025-10-06T05:38:07.614Z" }, - { url = "https://files.pythonhosted.org/packages/05/f3/86e75f8639c5a93745ca7addbbc9de6af56aebb930d233512b17e46f6493/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1", size = 228799, upload-time = "2025-10-06T05:38:08.845Z" }, - { url = "https://files.pythonhosted.org/packages/30/00/39aad3a7f0d98f5eb1d99a3c311215674ed87061aecee7851974b335c050/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178", size = 230566, upload-time = "2025-10-06T05:38:10.52Z" }, - { url = "https://files.pythonhosted.org/packages/0d/4d/aa144cac44568d137846ddc4d5210fb5d9719eb1d7ec6fa2728a54b5b94a/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda", size = 217715, upload-time = "2025-10-06T05:38:11.832Z" }, - { url = "https://files.pythonhosted.org/packages/64/4c/8f665921667509d25a0dd72540513bc86b356c95541686f6442a3283019f/frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087", size = 39933, upload-time = "2025-10-06T05:38:13.061Z" }, - { url = "https://files.pythonhosted.org/packages/79/bd/bcc926f87027fad5e59926ff12d136e1082a115025d33c032d1cd69ab377/frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a", size = 44121, upload-time = "2025-10-06T05:38:14.572Z" }, - { url = "https://files.pythonhosted.org/packages/4c/07/9c2e4eb7584af4b705237b971b89a4155a8e57599c4483a131a39256a9a0/frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103", size = 40312, upload-time = "2025-10-06T05:38:15.699Z" }, { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] @@ -1289,10 +918,11 @@ wheels = [ [[package]] name = "idp-common" -version = "0.3.13" 
+version = "0.4.2" source = { editable = "." } dependencies = [ { name = "boto3" }, + { name = "deepdiff" }, { name = "jsonschema" }, { name = "pydantic" }, ] @@ -1305,17 +935,18 @@ agentic-extraction = [ { name = "jsonpatch" }, { name = "pandas" }, { name = "pymupdf" }, - { name = "strands-agents", marker = "python_full_version >= '3.10'" }, + { name = "strands-agents" }, { name = "tabulate" }, ] agents = [ - { name = "bedrock-agentcore", marker = "python_full_version >= '3.10'" }, - { name = "strands-agents", marker = "python_full_version >= '3.10'" }, - { name = "strands-agents-tools", marker = "python_full_version >= '3.10'" }, + { name = "bedrock-agentcore" }, + { name = "strands-agents" }, + { name = "strands-agents-tools" }, ] all = [ { name = "amazon-textract-textractor", extra = ["pandas"] }, - { name = "bedrock-agentcore", marker = "python_full_version >= '3.10'" }, + { name = "bedrock-agentcore" }, + { name = "genson" }, { name = "munkres" }, { name = "numpy" }, { name = "openpyxl" }, @@ -1325,8 +956,9 @@ all = [ { name = "pymupdf" }, { name = "python-docx" }, { name = "requests" }, - { name = "strands-agents", marker = "python_full_version >= '3.10'" }, - { name = "strands-agents-tools", marker = "python_full_version >= '3.10'" }, + { name = "stickler-eval" }, + { name = "strands-agents" }, + { name = "strands-agents-tools" }, ] appsync = [ { name = "requests" }, @@ -1337,6 +969,15 @@ assessment = [ classification = [ { name = "pillow" }, ] +code-intel = [ + { name = "bedrock-agentcore" }, + { name = "chardet" }, + { name = "pathspec" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "strands-agents" }, + { name = "strands-agents-tools" }, +] criteria-validation = [ { name = "s3fs" }, ] @@ -1350,11 +991,12 @@ docs-service = [ { name = "requests" }, ] evaluation = [ + { name = "genson" }, { name = "munkres" }, { name = "numpy" }, + { name = "stickler-eval" }, ] extraction = [ - { name = "deepdiff" }, { name = "pillow" }, ] image = [ @@ -1393,7 +1035,7 @@ test = [ { name = "requests" }, { name = "rich" }, { name = "ruff" }, - { name = "strands-agents", marker = "python_full_version >= '3.10'" }, + { name = "strands-agents" }, { name = "tabulate" }, { name = "typer" }, ] @@ -1406,12 +1048,16 @@ requires-dist = [ { name = "aws-xray-sdk", marker = "extra == 'docs-service'", specifier = ">=2.14.0" }, { name = "bedrock-agentcore", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = ">=0.1.1" }, { name = "bedrock-agentcore", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = ">=0.1.1" }, + { name = "bedrock-agentcore", marker = "extra == 'code-intel'", specifier = ">=0.1.1" }, { name = "boto3", specifier = "==1.39.7" }, + { name = "chardet", marker = "extra == 'code-intel'", specifier = ">=5.0.0" }, { name = "datamodel-code-generator", marker = "extra == 'agentic-extraction'", specifier = ">=0.25.0" }, { name = "datamodel-code-generator", marker = "extra == 'test'", specifier = ">=0.25.0" }, - { name = "deepdiff", marker = "extra == 'extraction'", specifier = ">=6.0.0" }, + { name = "deepdiff", specifier = ">=6.0.0" }, { name = "deepdiff", marker = "extra == 'test'", specifier = ">=6.0.0" }, { name = "email-validator", marker = "extra == 'agentic-extraction'", specifier = ">=2.3.0" }, + { name = "genson", marker = "extra == 'all'", specifier = "==1.3.0" }, + { name = "genson", marker = "extra == 'evaluation'", specifier = "==1.3.0" }, { name = "idp-common", extras = ["agentic-extraction"], marker = "extra == 'test'" }, { name = 
"ipykernel", marker = "extra == 'dev'", specifier = ">=6.29.5,<7.0.0" }, { name = "jsonpatch", marker = "extra == 'agentic-extraction'", specifier = "==1.33" }, @@ -1430,6 +1076,7 @@ requires-dist = [ { name = "pandas", marker = "extra == 'agentic-extraction'", specifier = ">=2.2.3" }, { name = "pandas", marker = "extra == 'all'", specifier = "==2.2.3" }, { name = "pandas", marker = "extra == 'ocr'", specifier = "==2.2.3" }, + { name = "pathspec", marker = "extra == 'code-intel'", specifier = ">=0.11.0" }, { name = "pillow", marker = "extra == 'all'", specifier = "==11.2.1" }, { name = "pillow", marker = "extra == 'assessment'", specifier = "==11.2.1" }, { name = "pillow", marker = "extra == 'classification'", specifier = "==11.2.1" }, @@ -1441,7 +1088,7 @@ requires-dist = [ { name = "pyarrow", marker = "extra == 'reporting'", specifier = "==20.0.0" }, { name = "pyarrow", marker = "extra == 'test'", specifier = "==20.0.0" }, { name = "pydantic", specifier = ">=2.12.0" }, - { name = "pymupdf", marker = "extra == 'agentic-extraction'", specifier = ">=1.25.5" }, + { name = "pymupdf", marker = "extra == 'agentic-extraction'", specifier = "==1.25.5" }, { name = "pymupdf", marker = "extra == 'all'", specifier = "==1.25.5" }, { name = "pymupdf", marker = "extra == 'ocr'", specifier = "==1.25.5" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=7.4.0" }, @@ -1452,23 +1099,29 @@ requires-dist = [ { name = "python-docx", marker = "extra == 'ocr'", specifier = "==1.2.0" }, { name = "python-docx", marker = "extra == 'test'", specifier = "==1.2.0" }, { name = "python-dotenv", marker = "extra == 'dev'", specifier = ">=1.1.0,<2.0.0" }, + { name = "pyyaml", marker = "extra == 'code-intel'", specifier = ">=6.0.0" }, { name = "pyyaml", marker = "extra == 'test'", specifier = "==6.0.2" }, { name = "requests", marker = "extra == 'all'", specifier = "==2.32.4" }, { name = "requests", marker = "extra == 'appsync'", specifier = "==2.32.4" }, + { name = "requests", marker = "extra == 'code-intel'", specifier = "==2.32.4" }, { name = "requests", marker = "extra == 'docs-service'", specifier = "==2.32.4" }, { name = "requests", marker = "extra == 'test'", specifier = ">=2.32.3,<3.0.0" }, { name = "rich", marker = "extra == 'test'", specifier = ">=13.0.0" }, { name = "ruff", marker = "extra == 'test'", specifier = ">=0.14.0" }, { name = "s3fs", marker = "extra == 'criteria-validation'", specifier = "==2023.12.2" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agentic-extraction'", specifier = ">=1.7.1" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = ">=1.0.0" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = ">=1.0.0" }, - { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = ">=0.2.2" }, - { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = ">=0.2.2" }, + { name = "stickler-eval", marker = "extra == 'all'", specifier = "==0.1.2" }, + { name = "stickler-eval", marker = "extra == 'evaluation'", specifier = "==0.1.2" }, + { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agentic-extraction'", specifier = "==1.14.0" }, + { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==1.14.0" }, + { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 
'all'", specifier = "==1.14.0" }, + { name = "strands-agents", marker = "extra == 'code-intel'", specifier = "==1.14.0" }, + { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==0.2.13" }, + { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = "==0.2.13" }, + { name = "strands-agents-tools", marker = "extra == 'code-intel'", specifier = "==0.2.13" }, { name = "tabulate", marker = "extra == 'agentic-extraction'", specifier = ">=0.9.0" }, { name = "typer", marker = "extra == 'test'", specifier = ">=0.19.2" }, ] -provides-extras = ["core", "dev", "image", "ocr", "classification", "extraction", "assessment", "evaluation", "criteria-validation", "reporting", "appsync", "agents", "docs-service", "test", "all", "agentic-extraction"] +provides-extras = ["core", "dev", "image", "ocr", "classification", "extraction", "assessment", "evaluation", "criteria-validation", "reporting", "appsync", "agents", "code-intel", "docs-service", "test", "all", "agentic-extraction"] [[package]] name = "importlib-metadata" @@ -1512,9 +1165,7 @@ dependencies = [ { name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "comm" }, { name = "debugpy" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -1530,76 +1181,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fc/c7/b445faca8deb954fe536abebff4ece5b097b923de482b26e78448c89d1dd/ipykernel-6.30.1-py3-none-any.whl", hash = "sha256:aa6b9fb93dca949069d8b85b6c79b2518e32ac583ae9c7d37c51d119e18b3fb4", size = 117484, upload-time = "2025-08-04T15:47:32.622Z" }, ] -[[package]] -name = "ipython" -version = "8.18.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version < '3.10'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.10'" }, - { name = "jedi", marker = "python_full_version < '3.10'" }, - { name = "matplotlib-inline", marker = "python_full_version < '3.10'" }, - { name = "pexpect", marker = "python_full_version < '3.10' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "stack-data", marker = "python_full_version < '3.10'" }, - { name = "traitlets", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/b9/3ba6c45a6df813c09a48bac313c22ff83efa26cbb55011218d925a46e2ad/ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27", size = 5486330, upload-time = "2023-11-27T09:58:34.596Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/47/6b/d9fdcdef2eb6a23f391251fde8781c38d42acd82abe84d054cb74f7863b0/ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397", size = 808161, upload-time = "2023-11-27T09:58:30.538Z" }, -] - -[[package]] -name = "ipython" -version = "8.37.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version == '3.10.*'" }, - { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, - { name = "jedi", marker = "python_full_version == '3.10.*'" }, - { name = "matplotlib-inline", marker = "python_full_version == '3.10.*'" }, - { name = "pexpect", marker = "python_full_version == '3.10.*' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version == '3.10.*'" }, - { name = "pygments", marker = "python_full_version == '3.10.*'" }, - { name = "stack-data", marker = "python_full_version == '3.10.*'" }, - { name = "traitlets", marker = "python_full_version == '3.10.*'" }, - { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/85/31/10ac88f3357fc276dc8a64e8880c82e80e7459326ae1d0a211b40abf6665/ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216", size = 5606088, upload-time = "2025-05-31T16:39:09.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/d0/274fbf7b0b12643cbbc001ce13e6a5b1607ac4929d1b11c72460152c9fc3/ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2", size = 831864, upload-time = "2025-05-31T16:39:06.38Z" }, -] - [[package]] name = "ipython" version = "9.6.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", -] dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version >= '3.11'" }, - { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11'" }, - { name = "jedi", marker = "python_full_version >= '3.11'" }, - { name = "matplotlib-inline", marker = "python_full_version >= '3.11'" }, - { name = "pexpect", marker = "python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.11'" }, - { name = "pygments", marker = "python_full_version >= '3.11'" }, - { name = "stack-data", marker = "python_full_version >= '3.11'" }, - { name = "traitlets", marker = "python_full_version >= '3.11'" }, - { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "ipython-pygments-lexers" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = "stack-data" }, + { name = "traitlets" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" } wheels = [ @@ -1611,7 +1207,7 @@ name = "ipython-pygments-lexers" version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "pygments" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } wheels = [ @@ -1624,9 +1220,7 @@ version = "8.1.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "comm" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython" }, { name = "jupyterlab-widgets" }, { name = "traitlets" }, { name = "widgetsnbextension" }, @@ -1652,9 +1246,6 @@ wheels = [ name = "isort" version = "6.1.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/1e/82/fa43935523efdfcce6abbae9da7f372b627b27142c3419fcf13bf5b0c397/isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481", size = 824325, upload-time = "2025-10-01T16:26:45.027Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7f/cc/9b681a170efab4868a032631dea1e8446d8ec718a7f657b94d49d1a12643/isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784", size = 94329, upload-time = "2025-10-01T16:26:43.291Z" }, @@ -1785,7 +1376,6 @@ name = "jupyter-client" version = "8.6.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jupyter-core" }, { name = "python-dateutil" }, { name = "pyzmq" }, @@ -1803,9 +1393,7 @@ version = "6.6.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ipykernel" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "prompt-toolkit" }, @@ -1823,8 +1411,7 @@ name = "jupyter-core" version = "5.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "platformdirs", version = "4.5.0", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs" }, { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, { name = "traitlets" }, ] @@ -1857,7 +1444,6 @@ name = "jupyter-lsp" version = "2.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jupyter-server" }, ] sdist = { url = "https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823, upload-time = "2025-08-27T17:47:34.671Z" } @@ -1879,7 +1465,6 @@ dependencies = [ { name = "jupyter-server-terminals" }, { name = "nbconvert" }, { name = "nbformat" }, - { name = "overrides", marker = "python_full_version < '3.12'" }, { name = "packaging" }, { name = "prometheus-client" }, { name = "pywinpty", marker = "os_name == 'nt'" }, @@ -1915,7 +1500,6 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-lru" }, { name = "httpx" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "ipykernel" }, { name = "jinja2" }, { name = "jupyter-core" }, @@ -1925,7 +1509,6 @@ dependencies = [ { name = "notebook-shim" }, { name = "packaging" }, { name = "setuptools" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "tornado" }, { name = "traitlets" }, ] @@ -1949,7 +1532,6 @@ version = "2.27.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jinja2" }, { name = "json5" }, { name = "jsonschema" }, @@ -1986,38 +1568,6 @@ version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" }, - { url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" }, - { url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" }, - { url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" }, - { url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" }, - { url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" }, - { url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" }, - { url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" }, - { url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" }, - { url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" }, - { url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" }, - { url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" }, - { url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" }, - { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, - { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, - { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, - { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, - { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, - { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, - { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, - { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, - { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, - { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, - { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, - { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, @@ -2054,62 +1604,14 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700, upload-time = "2025-09-22T04:02:24.465Z" }, { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347, upload-time = "2025-09-22T04:02:26.286Z" }, { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = 
"sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248, upload-time = "2025-09-22T04:02:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/38/66/dd13c74fad495957374c8a81c932f4874d3dca5aa0db9e4369f06a399718/lxml-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2c8458c2cdd29589a8367c09c8f030f1d202be673f0ca224ec18590b3b9fb694", size = 8602363, upload-time = "2025-09-22T04:03:58.698Z" }, - { url = "https://files.pythonhosted.org/packages/5e/f4/edb9d47dce464b5dd044d35775ee794364935b93ab6226c95e199118890d/lxml-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3fee0851639d06276e6b387f1c190eb9d7f06f7f53514e966b26bae46481ec90", size = 4634995, upload-time = "2025-09-22T04:04:01.122Z" }, - { url = "https://files.pythonhosted.org/packages/66/f2/d80c97b6ed83a99bc24b2b29919d5e618af5322df6d3aa61064093712309/lxml-6.0.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2142a376b40b6736dfc214fd2902409e9e3857eff554fed2d3c60f097e62a62", size = 5003737, upload-time = "2025-09-22T04:04:02.98Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f1/18b750f79f8889b9109b24749f23ac137870b4f685edc4be54be0ff2c730/lxml-6.0.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6b5b39cc7e2998f968f05309e666103b53e2edd01df8dc51b90d734c0825444", size = 5160821, upload-time = "2025-09-22T04:04:04.854Z" }, - { url = "https://files.pythonhosted.org/packages/cf/88/2b6a415dbad411c3e9c092128eb7db06054d2d9460aa56676d17ee4f4fd5/lxml-6.0.2-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4aec24d6b72ee457ec665344a29acb2d35937d5192faebe429ea02633151aad", size = 5070959, upload-time = "2025-09-22T04:04:07.042Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d0/5354afaa0f2e53625e5f96f6bd049a4875c3ab79d96d6c4871dd1f4a98c4/lxml-6.0.2-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:b42f4d86b451c2f9d06ffb4f8bbc776e04df3ba070b9fe2657804b1b40277c48", size = 5410267, upload-time = "2025-09-22T04:04:10.458Z" }, - { url = "https://files.pythonhosted.org/packages/51/63/10dea35a01291dc529fa9d6ba204ea627a1c77b7fbb180d404f6cc4dd2fd/lxml-6.0.2-cp39-cp39-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cdaefac66e8b8f30e37a9b4768a391e1f8a16a7526d5bc77a7928408ef68e93", size = 5292990, upload-time = "2025-09-22T04:04:12.405Z" }, - { url = "https://files.pythonhosted.org/packages/37/58/51ef422d8bec58db600b3552e5f2d870ec01ffacf11d98689c42ffdcbf7f/lxml-6.0.2-cp39-cp39-manylinux_2_31_armv7l.whl", hash = "sha256:b738f7e648735714bbb82bdfd030203360cfeab7f6e8a34772b3c8c8b820568c", size = 4776318, upload-time = "2025-09-22T04:04:14.22Z" }, - { url = "https://files.pythonhosted.org/packages/77/97/3f797820e82e3a58a19bc51068b40f3b9ab7d0934ba6e5ba6b147b618319/lxml-6.0.2-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:daf42de090d59db025af61ce6bdb2521f0f102ea0e6ea310f13c17610a97da4c", size = 5360191, upload-time = "2025-09-22T04:04:16.236Z" }, - { url = "https://files.pythonhosted.org/packages/e2/14/a9306a8ab122e2f5dfbf4f71fb09beeadca26b0c275708432bbc33f40edc/lxml-6.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:66328dabea70b5ba7e53d94aa774b733cf66686535f3bc9250a7aab53a91caaf", size = 5116114, upload-time = "2025-09-22T04:04:18.594Z" }, - { url = "https://files.pythonhosted.org/packages/ea/23/2118a1685277b9fa8726ec7ee903db55aa300dcea3d406a220cbe3710953/lxml-6.0.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = 
"sha256:e237b807d68a61fc3b1e845407e27e5eb8ef69bc93fe8505337c1acb4ee300b6", size = 4801704, upload-time = "2025-09-22T04:04:20.466Z" }, - { url = "https://files.pythonhosted.org/packages/4e/e8/d5be34da2059dc9a4ff8643fd6ad3f8234a27b2a44831b7fff58c4dbb3e3/lxml-6.0.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:ac02dc29fd397608f8eb15ac1610ae2f2f0154b03f631e6d724d9e2ad4ee2c84", size = 5355451, upload-time = "2025-09-22T04:04:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/61/84/5aebc8e150d5bf488815ea2d8798c7ff509cc37b5725caa3c1f11bdd3245/lxml-6.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:817ef43a0c0b4a77bd166dc9a09a555394105ff3374777ad41f453526e37f9cb", size = 5318630, upload-time = "2025-09-22T04:04:24.301Z" }, - { url = "https://files.pythonhosted.org/packages/35/04/629ae603c1c17fb7adc9df2bc21aa5ac96afb84001700b13c1f038f3118c/lxml-6.0.2-cp39-cp39-win32.whl", hash = "sha256:bc532422ff26b304cfb62b328826bd995c96154ffd2bac4544f37dbb95ecaa8f", size = 3614032, upload-time = "2025-09-22T04:04:26.158Z" }, - { url = "https://files.pythonhosted.org/packages/71/de/07b7b1249acbecbf48f7e42c3ce87a657af6ff38e30f12a1ad81f16010f2/lxml-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:995e783eb0374c120f528f807443ad5a83a656a8624c467ea73781fc5f8a8304", size = 4035311, upload-time = "2025-09-22T04:04:28.413Z" }, - { url = "https://files.pythonhosted.org/packages/60/e3/02c4c55b281606f3c8e118300e16a9fcf5f3462cc46ce740ed0b82fc3f1b/lxml-6.0.2-cp39-cp39-win_arm64.whl", hash = "sha256:08b9d5e803c2e4725ae9e8559ee880e5328ed61aa0935244e0515d7d9dbec0aa", size = 3683462, upload-time = "2025-09-22T04:04:30.399Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" }, - { url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" }, - { url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" }, - { url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, - { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, - { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, - { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, - { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, - { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "mdurl", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, ] [[package]] name = "markdown-it-py" version = "4.0.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] dependencies = [ - { name = "mdurl", marker = "python_full_version >= '3.10'" }, + { name = "mdurl" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = 
"sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ @@ -2121,8 +1623,8 @@ name = "markdownify" version = "1.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "beautifulsoup4", marker = "python_full_version >= '3.10'" }, - { name = "six", marker = "python_full_version >= '3.10'" }, + { name = "beautifulsoup4" }, + { name = "six" }, ] sdist = { url = "https://files.pythonhosted.org/packages/83/1b/6f2697b51eaca81f08852fd2734745af15718fea10222a1d40f8a239c4ea/markdownify-1.2.0.tar.gz", hash = "sha256:f6c367c54eb24ee953921804dfe6d6575c5e5b42c643955e7242034435de634c", size = 18771, upload-time = "2025-08-09T17:44:15.302Z" } wheels = [ @@ -2135,28 +1637,6 @@ version = "3.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, - { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, - { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, - { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, - { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, - { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, - { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, - { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, - { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, - { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, - { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, - { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, - { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, - { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, - { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, @@ -2190,17 +1670,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, - { url = "https://files.pythonhosted.org/packages/56/23/0d8c13a44bde9154821586520840643467aee574d8ce79a17da539ee7fed/markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26", size = 11623, upload-time = "2025-09-27T18:37:29.296Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/23/07a2cb9a8045d5f3f0890a8c3bc0859d7a47bfd9a560b563899bec7b72ed/markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc", size = 12049, upload-time = "2025-09-27T18:37:30.234Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e4/6be85eb81503f8e11b61c0b6369b6e077dcf0a74adbd9ebf6b349937b4e9/markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c", size = 21923, upload-time = "2025-09-27T18:37:31.177Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bc/4dc914ead3fe6ddaef035341fee0fc956949bbd27335b611829292b89ee2/markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42", size = 20543, upload-time = "2025-09-27T18:37:32.168Z" }, - { url = "https://files.pythonhosted.org/packages/89/6e/5fe81fbcfba4aef4093d5f856e5c774ec2057946052d18d168219b7bd9f9/markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b", size = 20585, upload-time = "2025-09-27T18:37:33.166Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/e0e5a3d3ae9c4020f696cd055f940ef86b64fe88de26f3a0308b9d3d048c/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758", size = 21387, upload-time = "2025-09-27T18:37:34.185Z" }, - { url = "https://files.pythonhosted.org/packages/c8/25/651753ef4dea08ea790f4fbb65146a9a44a014986996ca40102e237aa49a/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2", size = 20133, upload-time = "2025-09-27T18:37:35.138Z" }, - { url = "https://files.pythonhosted.org/packages/dc/0a/c3cf2b4fef5f0426e8a6d7fce3cb966a17817c568ce59d76b92a233fdbec/markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d", size = 20588, upload-time = "2025-09-27T18:37:36.096Z" }, - { url = "https://files.pythonhosted.org/packages/cd/1b/a7782984844bd519ad4ffdbebbba2671ec5d0ebbeac34736c15fb86399e8/markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7", size = 14566, upload-time = "2025-09-27T18:37:37.09Z" }, - { url = "https://files.pythonhosted.org/packages/18/1f/8d9c20e1c9440e215a44be5ab64359e207fcb4f675543f1cf9a2a7f648d0/markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e", size = 15053, upload-time = "2025-09-27T18:37:38.054Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d3/fe08482b5cd995033556d45041a4f4e76e7f0521112a9c9991d40d39825f/markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8", size = 13928, upload-time = "2025-09-27T18:37:39.037Z" }, ] [[package]] @@ -2232,17 +1701,17 @@ name = "mcp" version = "1.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "python_full_version >= '3.10'" }, - { name = "httpx", marker = "python_full_version >= '3.10'" }, - { name = "httpx-sse", marker = "python_full_version >= '3.10'" }, - { name = "jsonschema", marker = 
"python_full_version >= '3.10'" }, - { name = "pydantic", marker = "python_full_version >= '3.10'" }, - { name = "pydantic-settings", marker = "python_full_version >= '3.10'" }, - { name = "python-multipart", marker = "python_full_version >= '3.10'" }, - { name = "pywin32", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, - { name = "sse-starlette", marker = "python_full_version >= '3.10'" }, - { name = "starlette", marker = "python_full_version >= '3.10'" }, - { name = "uvicorn", marker = "python_full_version >= '3.10' and sys_platform != 'emscripten'" }, + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5a/79/5724a540df19e192e8606c543cdcf162de8eb435077520cca150f7365ec0/mcp-1.17.0.tar.gz", hash = "sha256:1b57fabf3203240ccc48e39859faf3ae1ccb0b571ff798bbedae800c73c6df90", size = 477951, upload-time = "2025-10-10T12:16:44.519Z" } wheels = [ @@ -2262,9 +1731,6 @@ wheels = [ name = "mistune" version = "3.1.4" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588, upload-time = "2025-08-29T07:20:43.594Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481, upload-time = "2025-08-29T07:20:42.218Z" }, @@ -2318,47 +1784,8 @@ wheels = [ name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/63/7bdd4adc330abcca54c85728db2327130e49e52e8c3ce685cec44e0f2e9f/multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349", size = 77153, upload-time = "2025-10-06T14:48:26.409Z" }, - { url = "https://files.pythonhosted.org/packages/3f/bb/b6c35ff175ed1a3142222b78455ee31be71a8396ed3ab5280fbe3ebe4e85/multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e", size = 44993, upload-time = "2025-10-06T14:48:28.4Z" }, - { url = "https://files.pythonhosted.org/packages/e0/1f/064c77877c5fa6df6d346e68075c0f6998547afe952d6471b4c5f6a7345d/multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3", size = 44607, upload-time = "2025-10-06T14:48:29.581Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/7a/bf6aa92065dd47f287690000b3d7d332edfccb2277634cadf6a810463c6a/multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046", size = 241847, upload-time = "2025-10-06T14:48:32.107Z" }, - { url = "https://files.pythonhosted.org/packages/94/39/297a8de920f76eda343e4ce05f3b489f0ab3f9504f2576dfb37b7c08ca08/multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32", size = 242616, upload-time = "2025-10-06T14:48:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/39/3a/d0eee2898cfd9d654aea6cb8c4addc2f9756e9a7e09391cfe55541f917f7/multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73", size = 222333, upload-time = "2025-10-06T14:48:35.9Z" }, - { url = "https://files.pythonhosted.org/packages/05/48/3b328851193c7a4240815b71eea165b49248867bbb6153a0aee227a0bb47/multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc", size = 253239, upload-time = "2025-10-06T14:48:37.302Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ca/0706a98c8d126a89245413225ca4a3fefc8435014de309cf8b30acb68841/multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62", size = 251618, upload-time = "2025-10-06T14:48:38.963Z" }, - { url = "https://files.pythonhosted.org/packages/5e/4f/9c7992f245554d8b173f6f0a048ad24b3e645d883f096857ec2c0822b8bd/multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84", size = 241655, upload-time = "2025-10-06T14:48:40.312Z" }, - { url = "https://files.pythonhosted.org/packages/31/79/26a85991ae67efd1c0b1fc2e0c275b8a6aceeb155a68861f63f87a798f16/multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0", size = 239245, upload-time = "2025-10-06T14:48:41.848Z" }, - { url = "https://files.pythonhosted.org/packages/14/1e/75fa96394478930b79d0302eaf9a6c69f34005a1a5251ac8b9c336486ec9/multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e", size = 233523, upload-time = "2025-10-06T14:48:43.749Z" }, - { url = "https://files.pythonhosted.org/packages/b2/5e/085544cb9f9c4ad2b5d97467c15f856df8d9bac410cffd5c43991a5d878b/multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4", size = 243129, upload-time = "2025-10-06T14:48:45.225Z" }, - { url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648", size = 248999, upload-time = "2025-10-06T14:48:46.703Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111", size = 243711, upload-time = "2025-10-06T14:48:48.146Z" }, - { url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36", size = 237504, upload-time = "2025-10-06T14:48:49.447Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85", size = 41422, upload-time = "2025-10-06T14:48:50.789Z" }, - { url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7", size = 46050, upload-time = "2025-10-06T14:48:51.938Z" }, - { url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0", size = 43153, upload-time = "2025-10-06T14:48:53.146Z" }, - { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, - { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, - { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, - { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, - { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, - { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, - { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, - { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, - { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, - { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, - { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, - { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, - { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, - { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, @@ -2413,24 +1840,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, - { url = "https://files.pythonhosted.org/packages/90/d7/4cf84257902265c4250769ac49f4eaab81c182ee9aff8bf59d2714dbb174/multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c", size = 77073, upload-time = "2025-10-06T14:51:57.386Z" }, - { url = "https://files.pythonhosted.org/packages/6d/51/194e999630a656e76c2965a1590d12faa5cd528170f2abaa04423e09fe8d/multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40", size = 44928, upload-time = "2025-10-06T14:51:58.791Z" }, - { url = "https://files.pythonhosted.org/packages/e5/6b/2a195373c33068c9158e0941d0b46cfcc9c1d894ca2eb137d1128081dff0/multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851", size = 44581, upload-time = "2025-10-06T14:52:00.174Z" }, - { url = "https://files.pythonhosted.org/packages/69/7b/7f4f2e644b6978bf011a5fd9a5ebb7c21de3f38523b1f7897d36a1ac1311/multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687", size = 239901, upload-time = "2025-10-06T14:52:02.416Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/b5/952c72786710a031aa204a9adf7db66d7f97a2c6573889d58b9e60fe6702/multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5", size = 240534, upload-time = "2025-10-06T14:52:04.105Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ef/109fe1f2471e4c458c74242c7e4a833f2d9fc8a6813cd7ee345b0bad18f9/multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb", size = 219545, upload-time = "2025-10-06T14:52:06.208Z" }, - { url = "https://files.pythonhosted.org/packages/42/bd/327d91288114967f9fe90dc53de70aa3fec1b9073e46aa32c4828f771a87/multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6", size = 251187, upload-time = "2025-10-06T14:52:08.049Z" }, - { url = "https://files.pythonhosted.org/packages/f4/13/a8b078ebbaceb7819fd28cd004413c33b98f1b70d542a62e6a00b74fb09f/multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e", size = 249379, upload-time = "2025-10-06T14:52:09.831Z" }, - { url = "https://files.pythonhosted.org/packages/e3/6d/ab12e1246be4d65d1f55de1e6f6aaa9b8120eddcfdd1d290439c7833d5ce/multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e", size = 239241, upload-time = "2025-10-06T14:52:11.561Z" }, - { url = "https://files.pythonhosted.org/packages/bb/d7/079a93625208c173b8fa756396814397c0fd9fee61ef87b75a748820b86e/multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32", size = 237418, upload-time = "2025-10-06T14:52:13.671Z" }, - { url = "https://files.pythonhosted.org/packages/c9/29/03777c2212274aa9440918d604dc9d6af0e6b4558c611c32c3dcf1a13870/multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c", size = 232987, upload-time = "2025-10-06T14:52:15.708Z" }, - { url = "https://files.pythonhosted.org/packages/d9/00/11188b68d85a84e8050ee34724d6ded19ad03975caebe0c8dcb2829b37bf/multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84", size = 240985, upload-time = "2025-10-06T14:52:17.317Z" }, - { url = "https://files.pythonhosted.org/packages/df/0c/12eef6aeda21859c6cdf7d75bd5516d83be3efe3d8cc45fd1a3037f5b9dc/multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329", size = 246855, upload-time = "2025-10-06T14:52:19.096Z" }, - { url = "https://files.pythonhosted.org/packages/69/f6/076120fd8bb3975f09228e288e08bff6b9f1bfd5166397c7ba284f622ab2/multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e", size = 241804, upload-time = "2025-10-06T14:52:21.166Z" }, - { url = "https://files.pythonhosted.org/packages/5f/51/41bb950c81437b88a93e6ddfca1d8763569ae861e638442838c4375f7497/multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4", size = 235321, upload-time = "2025-10-06T14:52:23.208Z" }, - { url = "https://files.pythonhosted.org/packages/5a/cf/5bbd31f055199d56c1f6b04bbadad3ccb24e6d5d4db75db774fc6d6674b8/multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91", size = 41435, upload-time = "2025-10-06T14:52:24.735Z" }, - { url = "https://files.pythonhosted.org/packages/af/01/547ffe9c2faec91c26965c152f3fea6cff068b6037401f61d310cc861ff4/multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f", size = 46193, upload-time = "2025-10-06T14:52:26.101Z" }, - { url = "https://files.pythonhosted.org/packages/27/77/cfa5461d1d2651d6fc24216c92b4a21d4e385a41c46e0d9f3b070675167b/multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546", size = 43118, upload-time = "2025-10-06T14:52:27.876Z" }, { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] @@ -2475,7 +1884,6 @@ dependencies = [ { name = "beautifulsoup4" }, { name = "bleach", extra = ["css"] }, { name = "defusedxml" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "jinja2" }, { name = "jupyter-core" }, { name = "jupyterlab-pygments" }, @@ -2551,22 +1959,6 @@ version = "1.26.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468, upload-time = "2024-02-05T23:48:01.194Z" }, - { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411, upload-time = "2024-02-05T23:48:29.038Z" }, - { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016, upload-time = "2024-02-05T23:48:54.098Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889, upload-time = "2024-02-05T23:49:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size 
= 13876746, upload-time = "2024-02-05T23:49:51.983Z" }, - { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620, upload-time = "2024-02-05T23:50:22.515Z" }, - { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659, upload-time = "2024-02-05T23:50:35.834Z" }, - { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905, upload-time = "2024-02-05T23:51:03.701Z" }, - { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, - { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, { url = 
"https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, @@ -2575,17 +1967,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643, upload-time = "2024-02-05T23:57:56.585Z" }, { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803, upload-time = "2024-02-05T23:58:08.963Z" }, { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/7d/24/ce71dc08f06534269f66e73c04f5709ee024a1afe92a7b6e1d73f158e1f8/numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", size = 20636301, upload-time = "2024-02-05T23:59:10.976Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8c/ab03a7c25741f9ebc92684a20125fbc9fc1b8e1e700beb9197d750fdff88/numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", size = 13971216, upload-time = "2024-02-05T23:59:35.472Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/c3bcdf822269421d85fe0d64ba972003f9bb4aa9a419da64b86856c9961f/numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", size = 14226281, upload-time = "2024-02-05T23:59:59.372Z" }, - { url = "https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", size = 18249516, upload-time = "2024-02-06T00:00:32.79Z" }, - { url = "https://files.pythonhosted.org/packages/43/12/01a563fc44c07095996d0129b8899daf89e4742146f7044cdbdb3101c57f/numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", size = 13882132, upload-time = "2024-02-06T00:00:58.197Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/ee/9df80b06680aaa23fc6c31211387e0db349e0e36d6a63ba3bd78c5acdf11/numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", size = 18084181, upload-time = "2024-02-06T00:01:31.21Z" }, - { url = "https://files.pythonhosted.org/packages/28/7d/4b92e2fe20b214ffca36107f1a3e75ef4c488430e64de2d9af5db3a4637d/numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", size = 5976360, upload-time = "2024-02-06T00:01:43.013Z" }, - { url = "https://files.pythonhosted.org/packages/b5/42/054082bd8220bbf6f297f982f0a8f5479fcbc55c8b511d928df07b965869/numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", size = 15814633, upload-time = "2024-02-06T00:02:16.694Z" }, - { url = "https://files.pythonhosted.org/packages/3f/72/3df6c1c06fc83d9cfe381cccb4be2532bbd38bf93fbc9fad087b6687f1c0/numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", size = 20455961, upload-time = "2024-02-06T00:03:05.993Z" }, - { url = "https://files.pythonhosted.org/packages/8e/02/570545bac308b58ffb21adda0f4e220ba716fb658a63c151daecc3293350/numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", size = 18061071, upload-time = "2024-02-06T00:03:41.5Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5f/fafd8c51235f60d49f7a88e2275e13971e90555b67da52dd6416caec32fe/numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", size = 15709730, upload-time = "2024-02-06T00:04:11.719Z" }, ] [[package]] @@ -2605,8 +1986,8 @@ name = "opentelemetry-api" version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, + { name = "importlib-metadata" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/63/04/05040d7ce33a907a2a02257e601992f0cdf11c73b33f13c4492bf6c3d6d5/opentelemetry_api-1.37.0.tar.gz", hash = "sha256:540735b120355bd5112738ea53621f8d5edb35ebcd6fe21ada3ab1c61d1cd9a7", size = 64923, upload-time = "2025-09-11T10:29:01.662Z" } wheels = [ @@ -2618,10 +1999,10 @@ name = "opentelemetry-instrumentation" version = "0.58b0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions", marker = "python_full_version >= '3.10'" }, - { name = "packaging", marker = "python_full_version >= '3.10'" }, - { name = "wrapt", marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f6/36/7c307d9be8ce4ee7beb86d7f1d31027f2a6a89228240405a858d6e4d64f9/opentelemetry_instrumentation-0.58b0.tar.gz", hash = "sha256:df640f3ac715a3e05af145c18f527f4422c6ab6c467e40bd24d2ad75a00cb705", size = 31549, upload-time = "2025-09-11T11:42:14.084Z" } wheels = [ @@ -2633,9 +2014,9 @@ name = "opentelemetry-instrumentation-threading" version = "0.58b0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-instrumentation", marker = "python_full_version >= '3.10'" }, - { name = "wrapt", marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "wrapt" }, ] sdist = { url = "https://files.pythonhosted.org/packages/70/a9/3888cb0470e6eb48ea17b6802275ae71df411edd6382b9a8e8f391936fda/opentelemetry_instrumentation_threading-0.58b0.tar.gz", hash = "sha256:f68c61f77841f9ff6270176f4d496c10addbceacd782af434d705f83e4504862", size = 8770, upload-time = "2025-09-11T11:42:56.308Z" } wheels = [ @@ -2647,9 +2028,9 @@ name = "opentelemetry-sdk" version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f4/62/2e0ca80d7fe94f0b193135375da92c640d15fe81f636658d2acf373086bc/opentelemetry_sdk-1.37.0.tar.gz", hash = "sha256:cc8e089c10953ded765b5ab5669b198bbe0af1b3f89f1007d19acd32dc46dda5", size = 170404, upload-time = "2025-09-11T10:29:11.779Z" } wheels = [ @@ -2661,8 +2042,8 @@ name = "opentelemetry-semantic-conventions" version = "0.58b0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/aa/1b/90701d91e6300d9f2fb352153fb1721ed99ed1f6ea14fa992c756016e63a/opentelemetry_semantic_conventions-0.58b0.tar.gz", hash = "sha256:6bd46f51264279c433755767bb44ad00f1c9e2367e1b42af563372c5a6fa0c25", size = 129867, upload-time = "2025-09-11T10:29:12.597Z" } wheels = [ @@ -2678,15 +2059,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" }, ] -[[package]] -name = "overrides" -version = "7.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, -] - [[package]] name = "packaging" version = "25.0" @@ -2708,20 +2080,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = 
"sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/70/c853aec59839bceed032d52010ff5f1b8d87dc3114b762e4ba2727661a3b/pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5", size = 12580827, upload-time = "2024-09-20T13:08:42.347Z" }, - { url = "https://files.pythonhosted.org/packages/99/f2/c4527768739ffa4469b2b4fff05aa3768a478aed89a2f271a79a40eee984/pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348", size = 11303897, upload-time = "2024-09-20T13:08:45.807Z" }, - { url = "https://files.pythonhosted.org/packages/ed/12/86c1747ea27989d7a4064f806ce2bae2c6d575b950be087837bdfcabacc9/pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed", size = 66480908, upload-time = "2024-09-20T18:37:13.513Z" }, - { url = "https://files.pythonhosted.org/packages/44/50/7db2cd5e6373ae796f0ddad3675268c8d59fb6076e66f0c339d61cea886b/pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57", size = 13064210, upload-time = "2024-09-20T13:08:48.325Z" }, - { url = "https://files.pythonhosted.org/packages/61/61/a89015a6d5536cb0d6c3ba02cebed51a95538cf83472975275e28ebf7d0c/pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42", size = 16754292, upload-time = "2024-09-20T19:01:54.443Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0d/4cc7b69ce37fac07645a94e1d4b0880b15999494372c1523508511b09e40/pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f", size = 14416379, upload-time = "2024-09-20T13:08:50.882Z" }, - { url = "https://files.pythonhosted.org/packages/31/9e/6ebb433de864a6cd45716af52a4d7a8c3c9aaf3a98368e61db9e69e69a9c/pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645", size = 11598471, upload-time = "2024-09-20T13:08:53.332Z" }, - { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222, upload-time = "2024-09-20T13:08:56.254Z" }, - { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274, upload-time = "2024-09-20T13:08:58.645Z" }, - { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836, upload-time = "2024-09-20T19:01:57.571Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505, upload-time = "2024-09-20T13:09:01.501Z" }, - { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420, upload-time = "2024-09-20T19:02:00.678Z" }, - { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457, upload-time = "2024-09-20T13:09:04.105Z" }, - { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166, upload-time = "2024-09-20T13:09:06.917Z" }, { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893, upload-time = "2024-09-20T13:09:09.655Z" }, { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475, upload-time = "2024-09-20T13:09:14.718Z" }, { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645, upload-time = "2024-09-20T19:02:03.88Z" }, @@ -2742,13 +2100,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013, upload-time = "2024-09-20T13:09:44.39Z" }, { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620, upload-time = "2024-09-20T19:02:20.639Z" }, { url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436, upload-time = "2024-09-20T13:09:48.112Z" }, - { url = "https://files.pythonhosted.org/packages/ca/8c/8848a4c9b8fdf5a534fe2077af948bf53cd713d77ffbcd7bd15710348fd7/pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39", size = 12595535, upload-time = "2024-09-20T13:09:51.339Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b9/5cead4f63b6d31bdefeb21a679bc5a7f4aaf262ca7e07e2bc1c341b68470/pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30", size = 11319822, upload-time = "2024-09-20T13:09:54.31Z" }, - { url = "https://files.pythonhosted.org/packages/31/af/89e35619fb573366fa68dc26dad6ad2c08c17b8004aad6d98f1a31ce4bb3/pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c", size = 15625439, upload-time = "2024-09-20T19:02:23.689Z" }, - { url = "https://files.pythonhosted.org/packages/3d/dd/bed19c2974296661493d7acc4407b1d2db4e2a482197df100f8f965b6225/pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c", size = 13068928, upload-time = "2024-09-20T13:09:56.746Z" }, - { url = "https://files.pythonhosted.org/packages/31/a3/18508e10a31ea108d746c848b5a05c0711e0278fa0d6f1c52a8ec52b80a5/pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea", size = 16783266, upload-time = "2024-09-20T19:02:26.247Z" }, - { url = "https://files.pythonhosted.org/packages/c4/a5/3429bd13d82bebc78f4d78c3945efedef63a7cd0c15c17b2eeb838d1121f/pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761", size = 14450871, upload-time = "2024-09-20T13:09:59.779Z" }, - { url = "https://files.pythonhosted.org/packages/2f/49/5c30646e96c684570925b772eac4eb0a8cb0ca590fa978f56c5d3ae73ea1/pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e", size = 11618011, upload-time = "2024-09-20T13:10:02.351Z" }, ] [[package]] @@ -2796,28 +2147,6 @@ version = "11.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/af/cb/bb5c01fcd2a69335b86c22142b2bccfc3464087efb7fd382eee5ffc7fdf7/pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", size = 47026707, upload-time = "2025-04-12T17:50:03.289Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/8b/b158ad57ed44d3cc54db8d68ad7c0a58b8fc0e4c7a3f995f9d62d5b464a1/pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047", size = 3198442, upload-time = "2025-04-12T17:47:10.666Z" }, - { url = "https://files.pythonhosted.org/packages/b1/f8/bb5d956142f86c2d6cc36704943fa761f2d2e4c48b7436fd0a85c20f1713/pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95", size = 3030553, upload-time = "2025-04-12T17:47:13.153Z" }, - { url = "https://files.pythonhosted.org/packages/22/7f/0e413bb3e2aa797b9ca2c5c38cb2e2e45d88654e5b12da91ad446964cfae/pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61", size = 4405503, upload-time = "2025-04-12T17:47:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b4/cc647f4d13f3eb837d3065824aa58b9bcf10821f029dc79955ee43f793bd/pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1", size = 4490648, upload-time = "2025-04-12T17:47:17.37Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/6f/240b772a3b35cdd7384166461567aa6713799b4e78d180c555bd284844ea/pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c", size = 4508937, upload-time = "2025-04-12T17:47:19.066Z" }, - { url = "https://files.pythonhosted.org/packages/f3/5e/7ca9c815ade5fdca18853db86d812f2f188212792780208bdb37a0a6aef4/pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d", size = 4599802, upload-time = "2025-04-12T17:47:21.404Z" }, - { url = "https://files.pythonhosted.org/packages/02/81/c3d9d38ce0c4878a77245d4cf2c46d45a4ad0f93000227910a46caff52f3/pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97", size = 4576717, upload-time = "2025-04-12T17:47:23.571Z" }, - { url = "https://files.pythonhosted.org/packages/42/49/52b719b89ac7da3185b8d29c94d0e6aec8140059e3d8adcaa46da3751180/pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579", size = 4654874, upload-time = "2025-04-12T17:47:25.783Z" }, - { url = "https://files.pythonhosted.org/packages/5b/0b/ede75063ba6023798267023dc0d0401f13695d228194d2242d5a7ba2f964/pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d", size = 2331717, upload-time = "2025-04-12T17:47:28.922Z" }, - { url = "https://files.pythonhosted.org/packages/ed/3c/9831da3edea527c2ed9a09f31a2c04e77cd705847f13b69ca60269eec370/pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad", size = 2676204, upload-time = "2025-04-12T17:47:31.283Z" }, - { url = "https://files.pythonhosted.org/packages/01/97/1f66ff8a1503d8cbfc5bae4dc99d54c6ec1e22ad2b946241365320caabc2/pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2", size = 2414767, upload-time = "2025-04-12T17:47:34.655Z" }, - { url = "https://files.pythonhosted.org/packages/68/08/3fbf4b98924c73037a8e8b4c2c774784805e0fb4ebca6c5bb60795c40125/pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70", size = 3198450, upload-time = "2025-04-12T17:47:37.135Z" }, - { url = "https://files.pythonhosted.org/packages/84/92/6505b1af3d2849d5e714fc75ba9e69b7255c05ee42383a35a4d58f576b16/pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf", size = 3030550, upload-time = "2025-04-12T17:47:39.345Z" }, - { url = "https://files.pythonhosted.org/packages/3c/8c/ac2f99d2a70ff966bc7eb13dacacfaab57c0549b2ffb351b6537c7840b12/pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7", size = 4415018, upload-time = "2025-04-12T17:47:41.128Z" }, - { url = "https://files.pythonhosted.org/packages/1f/e3/0a58b5d838687f40891fff9cbaf8669f90c96b64dc8f91f87894413856c6/pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8", size = 4498006, upload-time = "2025-04-12T17:47:42.912Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/f5/6ba14718135f08fbfa33308efe027dd02b781d3f1d5c471444a395933aac/pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600", size = 4517773, upload-time = "2025-04-12T17:47:44.611Z" }, - { url = "https://files.pythonhosted.org/packages/20/f2/805ad600fc59ebe4f1ba6129cd3a75fb0da126975c8579b8f57abeb61e80/pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788", size = 4607069, upload-time = "2025-04-12T17:47:46.46Z" }, - { url = "https://files.pythonhosted.org/packages/71/6b/4ef8a288b4bb2e0180cba13ca0a519fa27aa982875882392b65131401099/pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e", size = 4583460, upload-time = "2025-04-12T17:47:49.255Z" }, - { url = "https://files.pythonhosted.org/packages/62/ae/f29c705a09cbc9e2a456590816e5c234382ae5d32584f451c3eb41a62062/pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e", size = 4661304, upload-time = "2025-04-12T17:47:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1a/c8217b6f2f73794a5e219fbad087701f412337ae6dbb956db37d69a9bc43/pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6", size = 2331809, upload-time = "2025-04-12T17:47:54.425Z" }, - { url = "https://files.pythonhosted.org/packages/e2/72/25a8f40170dc262e86e90f37cb72cb3de5e307f75bf4b02535a61afcd519/pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193", size = 2676338, upload-time = "2025-04-12T17:47:56.535Z" }, - { url = "https://files.pythonhosted.org/packages/06/9e/76825e39efee61efea258b479391ca77d64dbd9e5804e4ad0fa453b4ba55/pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7", size = 2414918, upload-time = "2025-04-12T17:47:58.217Z" }, { url = "https://files.pythonhosted.org/packages/c7/40/052610b15a1b8961f52537cc8326ca6a881408bc2bdad0d852edeb6ed33b/pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", size = 3190185, upload-time = "2025-04-12T17:48:00.417Z" }, { url = "https://files.pythonhosted.org/packages/e5/7e/b86dbd35a5f938632093dc40d1682874c33dcfe832558fc80ca56bfcb774/pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", size = 3030306, upload-time = "2025-04-12T17:48:02.391Z" }, { url = "https://files.pythonhosted.org/packages/a4/5c/467a161f9ed53e5eab51a42923c33051bf8d1a2af4626ac04f5166e58e0c/pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", size = 4416121, upload-time = "2025-04-12T17:48:04.554Z" }, @@ -2851,54 +2180,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/92/1ca0c3f09233bd7decf8f7105a1c4e3162fb9142128c74adad0fb361b7eb/pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd", size = 2335774, upload-time = "2025-04-12T17:49:04.889Z" }, { url = 
"https://files.pythonhosted.org/packages/a5/ac/77525347cb43b83ae905ffe257bbe2cc6fd23acb9796639a1f56aa59d191/pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e", size = 2681895, upload-time = "2025-04-12T17:49:06.635Z" }, { url = "https://files.pythonhosted.org/packages/67/32/32dc030cfa91ca0fc52baebbba2e009bb001122a1daa8b6a79ad830b38d3/pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", size = 2417234, upload-time = "2025-04-12T17:49:08.399Z" }, - { url = "https://files.pythonhosted.org/packages/21/3a/c1835d1c7cf83559e95b4f4ed07ab0bb7acc689712adfce406b3f456e9fd/pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8", size = 3198391, upload-time = "2025-04-12T17:49:10.122Z" }, - { url = "https://files.pythonhosted.org/packages/b6/4d/dcb7a9af3fc1e8653267c38ed622605d9d1793349274b3ef7af06457e257/pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909", size = 3030573, upload-time = "2025-04-12T17:49:11.938Z" }, - { url = "https://files.pythonhosted.org/packages/9d/29/530ca098c1a1eb31d4e163d317d0e24e6d2ead907991c69ca5b663de1bc5/pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928", size = 4398677, upload-time = "2025-04-12T17:49:13.861Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ee/0e5e51db34de1690264e5f30dcd25328c540aa11d50a3bc0b540e2a445b6/pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79", size = 4484986, upload-time = "2025-04-12T17:49:15.948Z" }, - { url = "https://files.pythonhosted.org/packages/93/7d/bc723b41ce3d2c28532c47678ec988974f731b5c6fadd5b3a4fba9015e4f/pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35", size = 4501897, upload-time = "2025-04-12T17:49:17.839Z" }, - { url = "https://files.pythonhosted.org/packages/be/0b/532e31abc7389617ddff12551af625a9b03cd61d2989fa595e43c470ec67/pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb", size = 4592618, upload-time = "2025-04-12T17:49:19.7Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f0/21ed6499a6216fef753e2e2254a19d08bff3747108ba042422383f3e9faa/pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a", size = 4570493, upload-time = "2025-04-12T17:49:21.703Z" }, - { url = "https://files.pythonhosted.org/packages/68/de/17004ddb8ab855573fe1127ab0168d11378cdfe4a7ee2a792a70ff2e9ba7/pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36", size = 4647748, upload-time = "2025-04-12T17:49:23.579Z" }, - { url = "https://files.pythonhosted.org/packages/c7/23/82ecb486384bb3578115c509d4a00bb52f463ee700a5ca1be53da3c88c19/pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67", size = 2331731, upload-time = "2025-04-12T17:49:25.58Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/bb/87efd58b3689537a623d44dbb2550ef0bb5ff6a62769707a0fe8b1a7bdeb/pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1", size = 2676346, upload-time = "2025-04-12T17:49:27.342Z" }, - { url = "https://files.pythonhosted.org/packages/80/08/dc268475b22887b816e5dcfae31bce897f524b4646bab130c2142c9b2400/pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e", size = 2414623, upload-time = "2025-04-12T17:49:29.139Z" }, - { url = "https://files.pythonhosted.org/packages/33/49/c8c21e4255b4f4a2c0c68ac18125d7f5460b109acc6dfdef1a24f9b960ef/pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156", size = 3181727, upload-time = "2025-04-12T17:49:31.898Z" }, - { url = "https://files.pythonhosted.org/packages/6d/f1/f7255c0838f8c1ef6d55b625cfb286835c17e8136ce4351c5577d02c443b/pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772", size = 2999833, upload-time = "2025-04-12T17:49:34.2Z" }, - { url = "https://files.pythonhosted.org/packages/e2/57/9968114457bd131063da98d87790d080366218f64fa2943b65ac6739abb3/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363", size = 3437472, upload-time = "2025-04-12T17:49:36.294Z" }, - { url = "https://files.pythonhosted.org/packages/b2/1b/e35d8a158e21372ecc48aac9c453518cfe23907bb82f950d6e1c72811eb0/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0", size = 3459976, upload-time = "2025-04-12T17:49:38.988Z" }, - { url = "https://files.pythonhosted.org/packages/26/da/2c11d03b765efff0ccc473f1c4186dc2770110464f2177efaed9cf6fae01/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01", size = 3527133, upload-time = "2025-04-12T17:49:40.985Z" }, - { url = "https://files.pythonhosted.org/packages/79/1a/4e85bd7cadf78412c2a3069249a09c32ef3323650fd3005c97cca7aa21df/pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193", size = 3571555, upload-time = "2025-04-12T17:49:42.964Z" }, - { url = "https://files.pythonhosted.org/packages/69/03/239939915216de1e95e0ce2334bf17a7870ae185eb390fab6d706aadbfc0/pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013", size = 2674713, upload-time = "2025-04-12T17:49:44.944Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ad/2613c04633c7257d9481ab21d6b5364b59fc5d75faafd7cb8693523945a3/pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed", size = 3181734, upload-time = "2025-04-12T17:49:46.789Z" }, - { url = "https://files.pythonhosted.org/packages/a4/fd/dcdda4471ed667de57bb5405bb42d751e6cfdd4011a12c248b455c778e03/pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c", size = 2999841, upload-time = "2025-04-12T17:49:48.812Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/89/8a2536e95e77432833f0db6fd72a8d310c8e4272a04461fb833eb021bf94/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd", size = 3437470, upload-time = "2025-04-12T17:49:50.831Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8f/abd47b73c60712f88e9eda32baced7bfc3e9bd6a7619bb64b93acff28c3e/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076", size = 3460013, upload-time = "2025-04-12T17:49:53.278Z" }, - { url = "https://files.pythonhosted.org/packages/f6/20/5c0a0aa83b213b7a07ec01e71a3d6ea2cf4ad1d2c686cc0168173b6089e7/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b", size = 3527165, upload-time = "2025-04-12T17:49:55.164Z" }, - { url = "https://files.pythonhosted.org/packages/58/0e/2abab98a72202d91146abc839e10c14f7cf36166f12838ea0c4db3ca6ecb/pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f", size = 3571586, upload-time = "2025-04-12T17:49:57.171Z" }, - { url = "https://files.pythonhosted.org/packages/21/2c/5e05f58658cf49b6667762cca03d6e7d85cededde2caf2ab37b81f80e574/pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", size = 2674751, upload-time = "2025-04-12T17:49:59.628Z" }, -] - -[[package]] -name = "platformdirs" -version = "4.4.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, ] [[package]] name = "platformdirs" version = "4.5.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, @@ -2940,36 +2227,6 @@ version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = 
"sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = "2025-10-08T19:46:02.083Z" }, - { url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, - { url = "https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, - { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, - { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, - { url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, - { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, - { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, - { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, - { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, - { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, - { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, - { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, - { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, - { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, - { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, - { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, - { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, - { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, - { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, - { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, - { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, - { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, - { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, { url = 
"https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, @@ -3015,21 +2272,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/0ebaec9003f5d619a7475165961f8e3083cf8644d704b60395df3601632d/propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff", size = 80277, upload-time = "2025-10-08T19:48:36.647Z" }, - { url = "https://files.pythonhosted.org/packages/34/58/04af97ac586b4ef6b9026c3fd36ee7798b737a832f5d3440a4280dcebd3a/propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb", size = 45865, upload-time = "2025-10-08T19:48:37.859Z" }, - { url = "https://files.pythonhosted.org/packages/7c/19/b65d98ae21384518b291d9939e24a8aeac4fdb5101b732576f8f7540e834/propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac", size = 47636, upload-time = "2025-10-08T19:48:39.038Z" }, - { url = "https://files.pythonhosted.org/packages/b3/0f/317048c6d91c356c7154dca5af019e6effeb7ee15fa6a6db327cc19e12b4/propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888", size = 201126, upload-time = "2025-10-08T19:48:40.774Z" }, - { url = "https://files.pythonhosted.org/packages/71/69/0b2a7a5a6ee83292b4b997dbd80549d8ce7d40b6397c1646c0d9495f5a85/propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc", size = 209837, upload-time = "2025-10-08T19:48:42.167Z" }, - { url = "https://files.pythonhosted.org/packages/a5/92/c699ac495a6698df6e497fc2de27af4b6ace10d8e76528357ce153722e45/propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a", size = 215578, upload-time = "2025-10-08T19:48:43.56Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ee/14de81c5eb02c0ee4f500b4e39c4e1bd0677c06e72379e6ab18923c773fc/propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88", size = 197187, 
upload-time = "2025-10-08T19:48:45.309Z" }, - { url = "https://files.pythonhosted.org/packages/1d/94/48dce9aaa6d8dd5a0859bad75158ec522546d4ac23f8e2f05fac469477dd/propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00", size = 193478, upload-time = "2025-10-08T19:48:47.743Z" }, - { url = "https://files.pythonhosted.org/packages/60/b5/0516b563e801e1ace212afde869a0596a0d7115eec0b12d296d75633fb29/propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0", size = 190650, upload-time = "2025-10-08T19:48:49.373Z" }, - { url = "https://files.pythonhosted.org/packages/24/89/e0f7d4a5978cd56f8cd67735f74052f257dc471ec901694e430f0d1572fe/propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e", size = 200251, upload-time = "2025-10-08T19:48:51.4Z" }, - { url = "https://files.pythonhosted.org/packages/06/7d/a1fac863d473876ed4406c914f2e14aa82d2f10dd207c9e16fc383cc5a24/propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781", size = 200919, upload-time = "2025-10-08T19:48:53.227Z" }, - { url = "https://files.pythonhosted.org/packages/c3/4e/f86a256ff24944cf5743e4e6c6994e3526f6acfcfb55e21694c2424f758c/propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183", size = 193211, upload-time = "2025-10-08T19:48:55.027Z" }, - { url = "https://files.pythonhosted.org/packages/6e/3f/3fbad5f4356b068f1b047d300a6ff2c66614d7030f078cd50be3fec04228/propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19", size = 38314, upload-time = "2025-10-08T19:48:56.792Z" }, - { url = "https://files.pythonhosted.org/packages/a4/45/d78d136c3a3d215677abb886785aae744da2c3005bcb99e58640c56529b1/propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f", size = 41912, upload-time = "2025-10-08T19:48:57.995Z" }, - { url = "https://files.pythonhosted.org/packages/fc/2a/b0632941f25139f4e58450b307242951f7c2717a5704977c6d5323a800af/propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938", size = 38450, upload-time = "2025-10-08T19:48:59.349Z" }, { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] @@ -3082,24 +2324,6 @@ version = "20.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/ee/a7810cb9f3d6e9238e61d312076a9859bf3668fd21c69744de9532383912/pyarrow-20.0.0.tar.gz", hash = "sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1", size = 1125187, upload-time = "2025-04-27T12:34:23.264Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/23/77094eb8ee0dbe88441689cb6afc40ac312a1e15d3a7acc0586999518222/pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7", size = 30832591, upload-time = "2025-04-27T12:27:27.89Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/d5/48cc573aff00d62913701d9fac478518f693b30c25f2c157550b0b2565cb/pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4", size = 32273686, upload-time = "2025-04-27T12:27:36.816Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae", size = 41337051, upload-time = "2025-04-27T12:27:44.4Z" }, - { url = "https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee", size = 42404659, upload-time = "2025-04-27T12:27:51.715Z" }, - { url = "https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20", size = 40695446, upload-time = "2025-04-27T12:27:59.643Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9", size = 42278528, upload-time = "2025-04-27T12:28:07.297Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75", size = 42918162, upload-time = "2025-04-27T12:28:15.716Z" }, - { url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8", size = 44550319, upload-time = "2025-04-27T12:28:27.026Z" }, - { url = "https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191", size = 25770759, upload-time = "2025-04-27T12:28:33.702Z" }, - { url = "https://files.pythonhosted.org/packages/47/a2/b7930824181ceadd0c63c1042d01fa4ef63eee233934826a7a2a9af6e463/pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0", size = 30856035, upload-time = "2025-04-27T12:28:40.78Z" }, - { url = "https://files.pythonhosted.org/packages/9b/18/c765770227d7f5bdfa8a69f64b49194352325c66a5c3bb5e332dfd5867d9/pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb", size = 32309552, upload-time = "2025-04-27T12:28:47.051Z" }, - { url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232", size = 41334704, upload-time = "2025-04-27T12:28:55.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f", size = 42399836, upload-time = "2025-04-27T12:29:02.13Z" }, - { url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab", size = 40711789, upload-time = "2025-04-27T12:29:09.951Z" }, - { url = "https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62", size = 42301124, upload-time = "2025-04-27T12:29:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c", size = 42916060, upload-time = "2025-04-27T12:29:24.253Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3", size = 44547640, upload-time = "2025-04-27T12:29:32.782Z" }, - { url = "https://files.pythonhosted.org/packages/54/96/46613131b4727f10fd2ffa6d0d6f02efcc09a0e7374eff3b5771548aa95b/pyarrow-20.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc", size = 25781491, upload-time = "2025-04-27T12:29:38.464Z" }, { url = "https://files.pythonhosted.org/packages/a1/d6/0c10e0d54f6c13eb464ee9b67a68b8c71bcf2f67760ef5b6fbcddd2ab05f/pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:75a51a5b0eef32727a247707d4755322cb970be7e935172b6a3a9f9ae98404ba", size = 30815067, upload-time = "2025-04-27T12:29:44.384Z" }, { url = "https://files.pythonhosted.org/packages/7e/e2/04e9874abe4094a06fd8b0cbb0f1312d8dd7d707f144c2ec1e5e8f452ffa/pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:211d5e84cecc640c7a3ab900f930aaff5cd2702177e0d562d426fb7c4f737781", size = 32297128, upload-time = "2025-04-27T12:29:52.038Z" }, { url = "https://files.pythonhosted.org/packages/31/fd/c565e5dcc906a3b471a83273039cb75cb79aad4a2d4a12f76cc5ae90a4b8/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199", size = 41334890, upload-time = "2025-04-27T12:29:59.452Z" }, @@ -3127,15 +2351,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/cb/65fa110b483339add6a9bc7b6373614166b14e20375d4daa73483755f830/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c", size = 42839441, upload-time = "2025-04-27T12:32:46.64Z" }, { url = "https://files.pythonhosted.org/packages/98/7b/f30b1954589243207d7a0fbc9997401044bf9a033eec78f6cb50da3f304a/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a", size = 44503279, upload-time = "2025-04-27T12:32:56.503Z" }, { url = 
"https://files.pythonhosted.org/packages/37/40/ad395740cd641869a13bcf60851296c89624662575621968dcfafabaa7f6/pyarrow-20.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:82f1ee5133bd8f49d31be1299dc07f585136679666b502540db854968576faf9", size = 25944982, upload-time = "2025-04-27T12:33:04.72Z" }, - { url = "https://files.pythonhosted.org/packages/10/53/421820fa125138c868729b930d4bc487af2c4b01b1c6104818aab7e98f13/pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:1bcbe471ef3349be7714261dea28fe280db574f9d0f77eeccc195a2d161fd861", size = 30844702, upload-time = "2025-04-27T12:33:12.122Z" }, - { url = "https://files.pythonhosted.org/packages/2e/70/fd75e03312b715e90d928fb91ed8d45c9b0520346e5231b1c69293afd4c7/pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a18a14baef7d7ae49247e75641fd8bcbb39f44ed49a9fc4ec2f65d5031aa3b96", size = 32287180, upload-time = "2025-04-27T12:33:20.597Z" }, - { url = "https://files.pythonhosted.org/packages/c4/e3/21e5758e46219fdedf5e6c800574dd9d17e962e80014cfe08d6d475be863/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc", size = 41351968, upload-time = "2025-04-27T12:33:28.215Z" }, - { url = "https://files.pythonhosted.org/packages/ac/f5/ed6a4c4b11f9215092a35097a985485bb7d879cb79d93d203494e8604f4e/pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec", size = 42415208, upload-time = "2025-04-27T12:33:37.04Z" }, - { url = "https://files.pythonhosted.org/packages/44/e5/466a63668ba25788ee8d38d55f853a60469ae7ad1cda343db9f3f45e0b0a/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5", size = 40708556, upload-time = "2025-04-27T12:33:46.483Z" }, - { url = "https://files.pythonhosted.org/packages/e8/d7/4c4d4e4cf6e53e16a519366dfe9223ee4a7a38e6e28c1c0d372b38ba3fe7/pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b", size = 42291754, upload-time = "2025-04-27T12:33:55.4Z" }, - { url = "https://files.pythonhosted.org/packages/07/d5/79effb32585b7c18897d3047a2163034f3f9c944d12f7b2fd8df6a2edc70/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d", size = 42936483, upload-time = "2025-04-27T12:34:03.694Z" }, - { url = "https://files.pythonhosted.org/packages/09/5c/f707603552c058b2e9129732de99a67befb1f13f008cc58856304a62c38b/pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619", size = 44558895, upload-time = "2025-04-27T12:34:13.26Z" }, - { url = "https://files.pythonhosted.org/packages/26/cc/1eb6a01c1bbc787f596c270c46bcd2273e35154a84afcb1d0cb4cc72457e/pyarrow-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:9965a050048ab02409fb7cbbefeedba04d3d67f2cc899eff505cc084345959ca", size = 25785667, upload-time = "2025-04-27T12:34:19.739Z" }, ] [[package]] @@ -3171,33 +2386,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b3/2c/a5c4640dc7132540109f67fe83b566fbc7512ccf2a068cfa22a243df70c7/pydantic_core-2.41.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e63036298322e9aea1c8b7c0a6c1204d615dbf6ec0668ce5b83ff27f07404a61", size = 2113814, upload-time = "2025-10-06T21:09:50.892Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e7/a8694c3454a57842095d69c7a4ab3cf81c3c7b590f052738eabfdfc2e234/pydantic_core-2.41.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:241299ca91fc77ef64f11ed909d2d9220a01834e8e6f8de61275c4dd16b7c936", size = 1916660, upload-time = "2025-10-06T21:09:52.783Z" }, - { url = "https://files.pythonhosted.org/packages/9c/58/29f12e65b19c1877a0269eb4f23c5d2267eded6120a7d6762501ab843dc9/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab7e594a2a5c24ab8013a7dc8cfe5f2260e80e490685814122081705c2cf2b0", size = 1975071, upload-time = "2025-10-06T21:09:54.009Z" }, - { url = "https://files.pythonhosted.org/packages/98/26/4e677f2b7ec3fbdd10be6b586a82a814c8ebe3e474024c8df2d4260e564e/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b054ef1a78519cb934b58e9c90c09e93b837c935dcd907b891f2b265b129eb6e", size = 2067271, upload-time = "2025-10-06T21:09:55.175Z" }, - { url = "https://files.pythonhosted.org/packages/29/50/50614bd906089904d7ca1be3b9ecf08c00a327143d48f1decfdc21b3c302/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2ab7d10d0ab2ed6da54c757233eb0f48ebfb4f86e9b88ccecb3f92bbd61a538", size = 2253207, upload-time = "2025-10-06T21:09:56.709Z" }, - { url = "https://files.pythonhosted.org/packages/ea/58/b1e640b4ca559273cca7c28e0fe8891d5d8e9a600f5ab4882670ec107549/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2757606b7948bb853a27e4040820306eaa0ccb9e8f9f8a0fa40cb674e170f350", size = 2375052, upload-time = "2025-10-06T21:09:57.97Z" }, - { url = "https://files.pythonhosted.org/packages/53/25/cd47df3bfb24350e03835f0950288d1054f1cc9a8023401dabe6d4ff2834/pydantic_core-2.41.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec0e75eb61f606bad0a32f2be87507087514e26e8c73db6cbdb8371ccd27917", size = 2076834, upload-time = "2025-10-06T21:09:59.58Z" }, - { url = "https://files.pythonhosted.org/packages/ec/b4/71b2c77e5df527fbbc1a03e72c3fd96c44cd10d4241a81befef8c12b9fc4/pydantic_core-2.41.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0234236514f44a5bf552105cfe2543a12f48203397d9d0f866affa569345a5b5", size = 2195374, upload-time = "2025-10-06T21:10:01.18Z" }, - { url = "https://files.pythonhosted.org/packages/aa/08/4b8a50733005865efde284fec45da75fe16a258f706e16323c5ace4004eb/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1b974e41adfbb4ebb0f65fc4ca951347b17463d60893ba7d5f7b9bb087c83897", size = 2156060, upload-time = "2025-10-06T21:10:02.74Z" }, - { url = "https://files.pythonhosted.org/packages/83/c3/1037cb603ef2130c210150a51b1710d86825b5c28df54a55750099f91196/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:248dafb3204136113c383e91a4d815269f51562b6659b756cf3df14eefc7d0bb", size = 2331640, upload-time = "2025-10-06T21:10:04.39Z" }, - { url = "https://files.pythonhosted.org/packages/56/4c/52d111869610e6b1a46e1f1035abcdc94d0655587e39104433a290e9f377/pydantic_core-2.41.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:678f9d76a91d6bcedd7568bbf6beb77ae8447f85d1aeebaab7e2f0829cfc3a13", size = 2329844, upload-time = "2025-10-06T21:10:05.68Z" }, - { url = "https://files.pythonhosted.org/packages/32/5d/4b435f0b52ab543967761aca66b84ad3f0026e491e57de47693d15d0a8db/pydantic_core-2.41.1-cp310-cp310-win32.whl", hash = "sha256:dff5bee1d21ee58277900692a641925d2dddfde65182c972569b1a276d2ac8fb", size = 1991289, upload-time = "2025-10-06T21:10:07.199Z" }, - { url = "https://files.pythonhosted.org/packages/88/52/31b4deafc1d3cb96d0e7c0af70f0dc05454982d135d07f5117e6336153e8/pydantic_core-2.41.1-cp310-cp310-win_amd64.whl", hash = "sha256:5042da12e5d97d215f91567110fdfa2e2595a25f17c19b9ff024f31c34f9b53e", size = 2027747, upload-time = "2025-10-06T21:10:08.503Z" }, - { url = "https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1", size = 2111456, upload-time = "2025-10-06T21:10:09.824Z" }, - { url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176", size = 1915012, upload-time = "2025-10-06T21:10:11.599Z" }, - { url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0", size = 1973762, upload-time = "2025-10-06T21:10:13.154Z" }, - { url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575", size = 2065386, upload-time = "2025-10-06T21:10:14.436Z" }, - { url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20", size = 2252317, upload-time = "2025-10-06T21:10:15.719Z" }, - { url = "https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4", size = 2373405, upload-time = "2025-10-06T21:10:17.017Z" }, - { url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d", size = 2073794, upload-time = "2025-10-06T21:10:18.383Z" }, - { url = "https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1", size = 2194430, upload-time = "2025-10-06T21:10:19.638Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9", size = 2154611, upload-time = "2025-10-06T21:10:21.28Z" }, - { url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1", size = 2329809, upload-time = "2025-10-06T21:10:22.678Z" }, - { url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea", size = 2327907, upload-time = "2025-10-06T21:10:24.447Z" }, - { url = "https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f", size = 1989964, upload-time = "2025-10-06T21:10:25.676Z" }, - { url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298", size = 2025158, upload-time = "2025-10-06T21:10:27.522Z" }, - { url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5", size = 1972297, upload-time = "2025-10-06T21:10:28.814Z" }, { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, @@ -3229,43 +2417,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc", size = 1875626, upload-time = "2025-10-06T21:11:15.69Z" }, { url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67", size = 2045708, upload-time = "2025-10-06T21:11:17.258Z" }, { url = 
"https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795", size = 1997171, upload-time = "2025-10-06T21:11:18.822Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d2/90421a4749f15aa4f06dd1d25a6419b91b181ae7994a4e7c4ed0a6415057/pydantic_core-2.41.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:10ce489cf09a4956a1549af839b983edc59b0f60e1b068c21b10154e58f54f80", size = 2114974, upload-time = "2025-10-06T21:11:53.549Z" }, - { url = "https://files.pythonhosted.org/packages/39/6a/3b5c2ba43da5380f252b35f7e74851e1379f4935c8bccbbda05992b5fe4d/pydantic_core-2.41.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff548c908caffd9455fd1342366bcf8a1ec8a3fca42f35c7fc60883d6a901074", size = 1940064, upload-time = "2025-10-06T21:11:55.268Z" }, - { url = "https://files.pythonhosted.org/packages/81/a9/050595183529316cf95d0f97662a4fe782dbea5f31dba0cf366015b67fad/pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d43bf082025082bda13be89a5f876cc2386b7727c7b322be2d2b706a45cea8e", size = 1976637, upload-time = "2025-10-06T21:11:57.024Z" }, - { url = "https://files.pythonhosted.org/packages/46/a8/846a8e466edd841c67f11f0ae738ca5c5d87968f6d8630bc449e2e6e11f2/pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:666aee751faf1c6864b2db795775dd67b61fdcf646abefa309ed1da039a97209", size = 2069274, upload-time = "2025-10-06T21:11:59.129Z" }, - { url = "https://files.pythonhosted.org/packages/4c/dc/19d01747082daf3667f952b6deee73e9e63338caa9c61442558cbdf8c876/pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83aaeff0d7bde852c32e856f3ee410842ebc08bc55c510771d87dcd1c01e1ed", size = 2255302, upload-time = "2025-10-07T10:49:36.917Z" }, - { url = "https://files.pythonhosted.org/packages/fa/99/0d4f031aeddf2cf03a5eb8eafde50147259067716c32174551b786aa72e1/pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:055c7931b0329cb8acde20cdde6d9c2cbc2a02a0a8e54a792cddd91e2ea92c65", size = 2386549, upload-time = "2025-10-07T10:49:39.385Z" }, - { url = "https://files.pythonhosted.org/packages/09/7f/027061a060718733a6c016e7d4acc864c8bb69f0092d9b3da7e3888b102f/pydantic_core-2.41.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530bbb1347e3e5ca13a91ac087c4971d7da09630ef8febd27a20a10800c2d06d", size = 2079817, upload-time = "2025-10-07T10:49:41.409Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5e/791c16d5e2a0b394c2c236f7d2556dbc381f8666bc12db7d35dc051c67e3/pydantic_core-2.41.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65a0ea16cfea7bfa9e43604c8bd726e63a3788b61c384c37664b55209fcb1d74", size = 2196276, upload-time = "2025-10-07T10:49:43.367Z" }, - { url = "https://files.pythonhosted.org/packages/a3/99/2c7010145da82fdd30955c1c0e1e75723ca7aef32b52f2565383fd2347d2/pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fa93fadff794c6d15c345c560513b160197342275c6d104cc879f932b978afc", size = 2157417, upload-time = "2025-10-07T10:49:45.176Z" }, - { url = "https://files.pythonhosted.org/packages/c6/df/b8f2ac7fa15479e989d0c2ea88e5e28eeb923096b2462804b9113bce51b5/pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c8a1af9ac51969a494c6a82b563abae6859dc082d3b999e8fa7ba5ee1b05e8e8", size = 
2333884, upload-time = "2025-10-07T10:49:46.896Z" }, - { url = "https://files.pythonhosted.org/packages/60/e8/06387d852bf67402fb0129b3297aa0c358aa9647e59f795c0965a7bedefe/pydantic_core-2.41.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30edab28829703f876897c9471a857e43d847b8799c3c9e2fbce644724b50aa4", size = 2330735, upload-time = "2025-10-07T10:49:48.79Z" }, - { url = "https://files.pythonhosted.org/packages/07/41/8964006fd8897df13cb0eec374bda053d1936cbc81315acdd755d85c99d5/pydantic_core-2.41.1-cp39-cp39-win32.whl", hash = "sha256:84d0ff869f98be2e93efdf1ae31e5a15f0926d22af8677d51676e373abbfe57a", size = 1992855, upload-time = "2025-10-07T10:49:50.806Z" }, - { url = "https://files.pythonhosted.org/packages/d3/c9/0f68c22ba0cac693326a7de73f04c7543886e0b240e2320f8ced861f0c3d/pydantic_core-2.41.1-cp39-cp39-win_amd64.whl", hash = "sha256:b5674314987cdde5a5511b029fa5fb1556b3d147a367e01dd583b19cfa8e35df", size = 2030219, upload-time = "2025-10-07T10:49:52.712Z" }, - { url = "https://files.pythonhosted.org/packages/16/89/d0afad37ba25f5801735af1472e650b86baad9fe807a42076508e4824a2a/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0", size = 2124001, upload-time = "2025-10-07T10:49:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/8e/c4/08609134b34520568ddebb084d9ed0a2a3f5f52b45739e6e22cb3a7112eb/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20", size = 1941841, upload-time = "2025-10-07T10:49:56.248Z" }, - { url = "https://files.pythonhosted.org/packages/2a/43/94a4877094e5fe19a3f37e7e817772263e2c573c94f1e3fa2b1eee56ef3b/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d", size = 1961129, upload-time = "2025-10-07T10:49:58.298Z" }, - { url = "https://files.pythonhosted.org/packages/a2/30/23a224d7e25260eb5f69783a63667453037e07eb91ff0e62dabaadd47128/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a", size = 2148770, upload-time = "2025-10-07T10:49:59.959Z" }, { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, { url = 
"https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, - { url = "https://files.pythonhosted.org/packages/d4/31/f403d7ca8352e3e4df352ccacd200f5f7f7fe81cef8e458515f015091625/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fabcbdb12de6eada8d6e9a759097adb3c15440fafc675b3e94ae5c9cb8d678a0", size = 2114268, upload-time = "2025-10-07T10:50:10.257Z" }, - { url = "https://files.pythonhosted.org/packages/6e/b5/334473b6d2810df84db67f03d4f666acacfc538512c2d2a254074fee0889/pydantic_core-2.41.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e97ccfaf0aaf67d55de5085b0ed0d994f57747d9d03f2de5cc9847ca737b08", size = 1935786, upload-time = "2025-10-07T10:50:12.333Z" }, - { url = "https://files.pythonhosted.org/packages/ea/5e/45513e4dc621f47397cfa5fef12ba8fa5e8b1c4c07f2ff2a5fef8ff81b25/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34df1fe8fea5d332484a763702e8b6a54048a9d4fe6ccf41e34a128238e01f52", size = 1971995, upload-time = "2025-10-07T10:50:14.071Z" }, - { url = "https://files.pythonhosted.org/packages/22/e3/f1797c168e5f52b973bed1c585e99827a22d5e579d1ed57d51bc15b14633/pydantic_core-2.41.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:421b5595f845842fc093f7250e24ee395f54ca62d494fdde96f43ecf9228ae01", size = 2191264, upload-time = "2025-10-07T10:50:15.788Z" }, - { url = "https://files.pythonhosted.org/packages/bb/e1/24ef4c3b4ab91c21c3a09a966c7d2cffe101058a7bfe5cc8b2c7c7d574e2/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dce8b22663c134583aaad24827863306a933f576c79da450be3984924e2031d1", size = 2152430, upload-time = "2025-10-07T10:50:18.018Z" }, - { url = "https://files.pythonhosted.org/packages/35/74/70c1e225d67f7ef3fdba02c506d9011efaf734020914920b2aa3d1a45e61/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:300a9c162fea9906cc5c103893ca2602afd84f0ec90d3be36f4cc360125d22e1", size = 2324691, upload-time = "2025-10-07T10:50:19.801Z" }, - { url = "https://files.pythonhosted.org/packages/c8/bf/dd4d21037c8bef0d8cce90a86a3f2dcb011c30086db2a10113c3eea23eba/pydantic_core-2.41.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e019167628f6e6161ae7ab9fb70f6d076a0bf0d55aa9b20833f86a320c70dd65", size = 2324493, upload-time = "2025-10-07T10:50:21.568Z" }, - { url = "https://files.pythonhosted.org/packages/7e/78/3093b334e9c9796c8236a4701cd2ddef1c56fb0928fe282a10c797644380/pydantic_core-2.41.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:13ab9cc2de6f9d4ab645a050ae5aee61a2424ac4d3a16ba23d4c2027705e0301", size = 2146156, upload-time = "2025-10-07T10:50:23.475Z" }, - { url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1", size = 2112094, upload-time = "2025-10-07T10:50:25.513Z" }, - { url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696", size = 1935300, upload-time = "2025-10-07T10:50:27.715Z" }, - { url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222", size = 1970417, upload-time = "2025-10-07T10:50:29.573Z" }, - { url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2", size = 2190745, upload-time = "2025-10-07T10:50:31.48Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506", size = 2150888, upload-time = "2025-10-07T10:50:33.477Z" }, - { url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656", size = 2324489, upload-time = "2025-10-07T10:50:36.48Z" }, - { url = "https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e", size = 2322763, upload-time = "2025-10-07T10:50:38.751Z" }, - { url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb", size = 2144046, upload-time = "2025-10-07T10:50:40.686Z" }, ] [[package]] @@ -3273,9 +2428,9 @@ name = "pydantic-settings" version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pydantic", marker = "python_full_version >= '3.10'" }, - { name = "python-dotenv", marker = "python_full_version >= '3.10'" }, - { name = "typing-inspection", marker = "python_full_version >= '3.10'" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, ] sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } wheels = [ @@ -3321,12 +2476,10 @@ version = "8.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = 
"2025-09-04T14:34:22.711Z" } wheels = [ @@ -3338,7 +2491,6 @@ name = "pytest-asyncio" version = "1.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, { name = "pytest" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] @@ -3352,7 +2504,7 @@ name = "pytest-cov" version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "coverage", extra = ["toml"] }, + { name = "coverage" }, { name = "pluggy" }, { name = "pytest" }, ] @@ -3412,9 +2564,6 @@ wheels = [ name = "python-json-logger" version = "4.0.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, @@ -3452,21 +2601,12 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, - { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, - { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, - { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, - { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = 
"sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, - { url = "https://files.pythonhosted.org/packages/59/42/b86689aac0cdaee7ae1c58d464b0ff04ca909c19bb6502d4973cdd9f9544/pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b", size = 8760837, upload-time = "2025-07-14T20:12:59.59Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8a/1403d0353f8c5a2f0829d2b1c4becbf9da2f0a4d040886404fc4a5431e4d/pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91", size = 9590187, upload-time = "2025-07-14T20:13:01.419Z" }, - { url = "https://files.pythonhosted.org/packages/60/22/e0e8d802f124772cec9c75430b01a212f86f9de7546bda715e54140d5aeb/pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d", size = 8778162, upload-time = "2025-07-14T20:13:03.544Z" }, ] [[package]] @@ -3475,12 +2615,9 @@ version = "3.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669, upload-time = "2025-10-03T21:16:29.205Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/f5/b17ae550841949c217ad557ee445b4a14e9c0b506ae51ee087eff53428a6/pywinpty-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:65db57fd3387d71e8372b6a54269cbcd0f6dfa6d4616a29e0af749ec19f5c558", size = 2050330, upload-time = "2025-10-03T21:20:15.656Z" }, - { url = "https://files.pythonhosted.org/packages/a6/a1/409c1651c9f874d598c10f51ff586c416625601df4bca315d08baec4c3e3/pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23", size = 2050304, upload-time = "2025-10-03T21:19:29.466Z" }, { url = 
"https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391, upload-time = "2025-10-03T21:19:01.642Z" }, { url = "https://files.pythonhosted.org/packages/fc/19/b757fe28008236a4a713e813283721b8a40aa60cd7d3f83549f2e25a3155/pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51", size = 2050057, upload-time = "2025-10-03T21:19:26.732Z" }, { url = "https://files.pythonhosted.org/packages/cb/44/cbae12ecf6f4fa4129c36871fd09c6bef4f98d5f625ecefb5e2449765508/pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b", size = 2049874, upload-time = "2025-10-03T21:18:53.923Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ea/5cc069afc60f6dd5bc99b3e51fb8b219f10bcf5674882fc5d6dd2186d3aa/pywinpty-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:3962daf801bc38dd4de872108c424b5338c9a46c6efca5761854cd66370a9022", size = 2052447, upload-time = "2025-10-03T21:18:51.457Z" }, ] [[package]] @@ -3489,24 +2626,6 @@ version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, @@ -3525,15 +2644,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, - { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, - { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, - { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, - { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, ] [[package]] @@ -3545,26 +2655,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/b9/52aa9ec2867528b54f1e60846728d8b4d84726630874fee3a91e66c7df81/pyzmq-27.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:508e23ec9bc44c0005c4946ea013d9317ae00ac67778bd47519fdf5a0e930ff4", size = 1329850, upload-time = "2025-09-08T23:07:26.274Z" }, - { url = "https://files.pythonhosted.org/packages/99/64/5653e7b7425b169f994835a2b2abf9486264401fdef18df91ddae47ce2cc/pyzmq-27.1.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:507b6f430bdcf0ee48c0d30e734ea89ce5567fd7b8a0f0044a369c176aa44556", size = 906380, upload-time = "2025-09-08T23:07:29.78Z" }, - { url = "https://files.pythonhosted.org/packages/73/78/7d713284dbe022f6440e391bd1f3c48d9185673878034cfb3939cdf333b2/pyzmq-27.1.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf7b38f9fd7b81cb6d9391b2946382c8237fd814075c6aa9c3b746d53076023b", size = 666421, upload-time = "2025-09-08T23:07:31.263Z" }, - { url = "https://files.pythonhosted.org/packages/30/76/8f099f9d6482450428b17c4d6b241281af7ce6a9de8149ca8c1c649f6792/pyzmq-27.1.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03ff0b279b40d687691a6217c12242ee71f0fba28bf8626ff50e3ef0f4410e1e", size = 854149, upload-time = "2025-09-08T23:07:33.17Z" }, - { url = "https://files.pythonhosted.org/packages/59/f0/37fbfff06c68016019043897e4c969ceab18bde46cd2aca89821fcf4fb2e/pyzmq-27.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:677e744fee605753eac48198b15a2124016c009a11056f93807000ab11ce6526", size = 1655070, upload-time = "2025-09-08T23:07:35.205Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/14/7254be73f7a8edc3587609554fcaa7bfd30649bf89cd260e4487ca70fdaa/pyzmq-27.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd2fec2b13137416a1c5648b7009499bcc8fea78154cd888855fa32514f3dad1", size = 2033441, upload-time = "2025-09-08T23:07:37.432Z" }, - { url = "https://files.pythonhosted.org/packages/22/dc/49f2be26c6f86f347e796a4d99b19167fc94503f0af3fd010ad262158822/pyzmq-27.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08e90bb4b57603b84eab1d0ca05b3bbb10f60c1839dc471fc1c9e1507bef3386", size = 1891529, upload-time = "2025-09-08T23:07:39.047Z" }, - { url = "https://files.pythonhosted.org/packages/a3/3e/154fb963ae25be70c0064ce97776c937ecc7d8b0259f22858154a9999769/pyzmq-27.1.0-cp310-cp310-win32.whl", hash = "sha256:a5b42d7a0658b515319148875fcb782bbf118dd41c671b62dae33666c2213bda", size = 567276, upload-time = "2025-09-08T23:07:40.695Z" }, - { url = "https://files.pythonhosted.org/packages/62/b2/f4ab56c8c595abcb26b2be5fd9fa9e6899c1e5ad54964e93ae8bb35482be/pyzmq-27.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0bb87227430ee3aefcc0ade2088100e528d5d3298a0a715a64f3d04c60ba02f", size = 632208, upload-time = "2025-09-08T23:07:42.298Z" }, - { url = "https://files.pythonhosted.org/packages/3b/e3/be2cc7ab8332bdac0522fdb64c17b1b6241a795bee02e0196636ec5beb79/pyzmq-27.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:9a916f76c2ab8d045b19f2286851a38e9ac94ea91faf65bd64735924522a8b32", size = 559766, upload-time = "2025-09-08T23:07:43.869Z" }, - { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" }, - { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" }, - { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" }, - { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" }, - { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" }, - { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" }, - { url = "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" }, - { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" }, - { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" }, { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, @@ -3587,31 +2677,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" }, { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" }, { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" }, - { url = "https://files.pythonhosted.org/packages/ac/4e/782eb6df91b6a9d9afa96c2dcfc5cac62562a68eb62a02210101f886014d/pyzmq-27.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:96c71c32fff75957db6ae33cd961439f386505c6e6b377370af9b24a1ef9eafb", size = 1330426, upload-time = "2025-09-08T23:09:21.03Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/ca/2b8693d06b1db4e0c084871e4c9d7842b561d0a6ff9d780640f5e3e9eb55/pyzmq-27.1.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:49d3980544447f6bd2968b6ac913ab963a49dcaa2d4a2990041f16057b04c429", size = 906559, upload-time = "2025-09-08T23:09:22.983Z" }, - { url = "https://files.pythonhosted.org/packages/6a/b3/b99b39e2cfdcebd512959780e4d299447fd7f46010b1d88d63324e2481ec/pyzmq-27.1.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:849ca054d81aa1c175c49484afaaa5db0622092b5eccb2055f9f3bb8f703782d", size = 863816, upload-time = "2025-09-08T23:09:24.556Z" }, - { url = "https://files.pythonhosted.org/packages/61/b2/018fa8e8eefb34a625b1a45e2effcbc9885645b22cdd0a68283f758351e7/pyzmq-27.1.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3970778e74cb7f85934d2b926b9900e92bfe597e62267d7499acc39c9c28e345", size = 666735, upload-time = "2025-09-08T23:09:26.297Z" }, - { url = "https://files.pythonhosted.org/packages/01/05/8ae778f7cd7c94030731ae2305e6a38f3a333b6825f56c0c03f2134ccf1b/pyzmq-27.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:da96ecdcf7d3919c3be2de91a8c513c186f6762aa6cf7c01087ed74fad7f0968", size = 1655425, upload-time = "2025-09-08T23:09:28.172Z" }, - { url = "https://files.pythonhosted.org/packages/ad/ad/d69478a97a3f3142f9dbbbd9daa4fcf42541913a85567c36d4cfc19b2218/pyzmq-27.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9541c444cfe1b1c0156c5c86ece2bb926c7079a18e7b47b0b1b3b1b875e5d098", size = 2033729, upload-time = "2025-09-08T23:09:30.097Z" }, - { url = "https://files.pythonhosted.org/packages/9a/6d/e3c6ad05bc1cddd25094e66cc15ae8924e15c67e231e93ed2955c401007e/pyzmq-27.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e30a74a39b93e2e1591b58eb1acef4902be27c957a8720b0e368f579b82dc22f", size = 1891803, upload-time = "2025-09-08T23:09:31.875Z" }, - { url = "https://files.pythonhosted.org/packages/7f/a7/97e8be0daaca157511563160b67a13d4fe76b195e3fa6873cb554ad46be3/pyzmq-27.1.0-cp39-cp39-win32.whl", hash = "sha256:b1267823d72d1e40701dcba7edc45fd17f71be1285557b7fe668887150a14b78", size = 567627, upload-time = "2025-09-08T23:09:33.98Z" }, - { url = "https://files.pythonhosted.org/packages/5c/91/70bbf3a7c5b04c904261ef5ba224d8a76315f6c23454251bf5f55573a8a1/pyzmq-27.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c996ded912812a2fcd7ab6574f4ad3edc27cb6510349431e4930d4196ade7db", size = 632315, upload-time = "2025-09-08T23:09:36.097Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b5/a4173a83c7fd37f6bdb5a800ea338bc25603284e9ef8681377cec006ede4/pyzmq-27.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:346e9ba4198177a07e7706050f35d733e08c1c1f8ceacd5eb6389d653579ffbc", size = 559833, upload-time = "2025-09-08T23:09:38.183Z" }, - { url = "https://files.pythonhosted.org/packages/f3/81/a65e71c1552f74dec9dff91d95bafb6e0d33338a8dfefbc88aa562a20c92/pyzmq-27.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c17e03cbc9312bee223864f1a2b13a99522e0dc9f7c5df0177cd45210ac286e6", size = 836266, upload-time = "2025-09-08T23:09:40.048Z" }, - { url = "https://files.pythonhosted.org/packages/58/ed/0202ca350f4f2b69faa95c6d931e3c05c3a397c184cacb84cb4f8f42f287/pyzmq-27.1.0-pp310-pypy310_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f328d01128373cb6763823b2b4e7f73bdf767834268c565151eacb3b7a392f90", size = 800206, upload-time = "2025-09-08T23:09:41.902Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/42/1ff831fa87fe8f0a840ddb399054ca0009605d820e2b44ea43114f5459f4/pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c1790386614232e1b3a40a958454bdd42c6d1811837b15ddbb052a032a43f62", size = 567747, upload-time = "2025-09-08T23:09:43.741Z" }, - { url = "https://files.pythonhosted.org/packages/d1/db/5c4d6807434751e3f21231bee98109aa57b9b9b55e058e450d0aef59b70f/pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:448f9cb54eb0cee4732b46584f2710c8bc178b0e5371d9e4fc8125201e413a74", size = 747371, upload-time = "2025-09-08T23:09:45.575Z" }, - { url = "https://files.pythonhosted.org/packages/26/af/78ce193dbf03567eb8c0dc30e3df2b9e56f12a670bf7eb20f9fb532c7e8a/pyzmq-27.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:05b12f2d32112bf8c95ef2e74ec4f1d4beb01f8b5e703b38537f8849f92cb9ba", size = 544862, upload-time = "2025-09-08T23:09:47.448Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" }, - { url = "https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" }, - { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" }, - { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" }, - { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" }, - { url = "https://files.pythonhosted.org/packages/57/f4/c2e978cf6b833708bad7d6396c3a20c19750585a1775af3ff13c435e1912/pyzmq-27.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:722ea791aa233ac0a819fc2c475e1292c76930b31f1d828cb61073e2fe5e208f", size = 836257, upload-time = "2025-09-08T23:10:07.635Z" }, - { url = "https://files.pythonhosted.org/packages/5f/5f/4e10c7f57a4c92ab0fbb2396297aa8d618e6f5b9b8f8e9756d56f3e6fc52/pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:01f9437501886d3a1dd4b02ef59fb8cc384fa718ce066d52f175ee49dd5b7ed8", size = 800203, upload-time = "2025-09-08T23:10:09.436Z" }, - { url = "https://files.pythonhosted.org/packages/19/72/a74a007cd636f903448c6ab66628104b1fc5f2ba018733d5eabb94a0a6fb/pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a19387a3dddcc762bfd2f570d14e2395b2c9701329b266f83dd87a2b3cbd381", 
size = 758756, upload-time = "2025-09-08T23:10:11.733Z" }, - { url = "https://files.pythonhosted.org/packages/a9/d4/30c25b91f2b4786026372f5ef454134d7f576fcf4ac58539ad7dd5de4762/pyzmq-27.1.0-pp39-pypy39_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c618fbcd069e3a29dcd221739cacde52edcc681f041907867e0f5cc7e85f172", size = 567742, upload-time = "2025-09-08T23:10:14.732Z" }, - { url = "https://files.pythonhosted.org/packages/92/aa/ee86edad943438cd0316964020c4b6d09854414f9f945f8e289ea6fcc019/pyzmq-27.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff8d114d14ac671d88c89b9224c63d6c4e5a613fe8acd5594ce53d752a3aafe9", size = 544857, upload-time = "2025-09-08T23:10:16.431Z" }, +] + +[[package]] +name = "rapidfuzz" +version = "3.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/28/9d808fe62375b9aab5ba92fa9b29371297b067c2790b2d7cda648b1e2f8d/rapidfuzz-3.14.3.tar.gz", hash = "sha256:2491937177868bc4b1e469087601d53f925e8d270ccc21e07404b4b5814b7b5f", size = 57863900, upload-time = "2025-11-01T11:54:52.321Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/8e/3c215e860b458cfbedb3ed73bc72e98eb7e0ed72f6b48099604a7a3260c2/rapidfuzz-3.14.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:685c93ea961d135893b5984a5a9851637d23767feabe414ec974f43babbd8226", size = 1945306, upload-time = "2025-11-01T11:53:06.452Z" }, + { url = "https://files.pythonhosted.org/packages/36/d9/31b33512015c899f4a6e6af64df8dfe8acddf4c8b40a4b3e0e6e1bcd00e5/rapidfuzz-3.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa7c8f26f009f8c673fbfb443792f0cf8cf50c4e18121ff1e285b5e08a94fbdb", size = 1390788, upload-time = "2025-11-01T11:53:08.721Z" }, + { url = "https://files.pythonhosted.org/packages/a9/67/2ee6f8de6e2081ccd560a571d9c9063184fe467f484a17fa90311a7f4a2e/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57f878330c8d361b2ce76cebb8e3e1dc827293b6abf404e67d53260d27b5d941", size = 1374580, upload-time = "2025-11-01T11:53:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/80d22997acd928eda7deadc19ccd15883904622396d6571e935993e0453a/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c5f545f454871e6af05753a0172849c82feaf0f521c5ca62ba09e1b382d6382", size = 3154947, upload-time = "2025-11-01T11:53:12.093Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cf/9f49831085a16384695f9fb096b99662f589e30b89b4a589a1ebc1a19d34/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:07aa0b5d8863e3151e05026a28e0d924accf0a7a3b605da978f0359bb804df43", size = 1223872, upload-time = "2025-11-01T11:53:13.664Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0f/41ee8034e744b871c2e071ef0d360686f5ccfe5659f4fd96c3ec406b3c8b/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73b07566bc7e010e7b5bd490fb04bb312e820970180df6b5655e9e6224c137db", size = 2392512, upload-time = "2025-11-01T11:53:15.109Z" }, + { url = "https://files.pythonhosted.org/packages/da/86/280038b6b0c2ccec54fb957c732ad6b41cc1fd03b288d76545b9cf98343f/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6de00eb84c71476af7d3110cf25d8fe7c792d7f5fa86764ef0b4ca97e78ca3ed", size = 2521398, upload-time = "2025-11-01T11:53:17.146Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7b/05c26f939607dca0006505e3216248ae2de631e39ef94dd63dbbf0860021/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_x86_64.whl", 
hash = "sha256:d7843a1abf0091773a530636fdd2a49a41bcae22f9910b86b4f903e76ddc82dc", size = 4259416, upload-time = "2025-11-01T11:53:19.34Z" }, + { url = "https://files.pythonhosted.org/packages/40/eb/9e3af4103d91788f81111af1b54a28de347cdbed8eaa6c91d5e98a889aab/rapidfuzz-3.14.3-cp312-cp312-win32.whl", hash = "sha256:dea97ac3ca18cd3ba8f3d04b5c1fe4aa60e58e8d9b7793d3bd595fdb04128d7a", size = 1709527, upload-time = "2025-11-01T11:53:20.949Z" }, + { url = "https://files.pythonhosted.org/packages/b8/63/d06ecce90e2cf1747e29aeab9f823d21e5877a4c51b79720b2d3be7848f8/rapidfuzz-3.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:b5100fd6bcee4d27f28f4e0a1c6b5127bc8ba7c2a9959cad9eab0bf4a7ab3329", size = 1538989, upload-time = "2025-11-01T11:53:22.428Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6d/beee32dcda64af8128aab3ace2ccb33d797ed58c434c6419eea015fec779/rapidfuzz-3.14.3-cp312-cp312-win_arm64.whl", hash = "sha256:4e49c9e992bc5fc873bd0fff7ef16a4405130ec42f2ce3d2b735ba5d3d4eb70f", size = 811161, upload-time = "2025-11-01T11:53:23.811Z" }, + { url = "https://files.pythonhosted.org/packages/e4/4f/0d94d09646853bd26978cb3a7541b6233c5760687777fa97da8de0d9a6ac/rapidfuzz-3.14.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbcb726064b12f356bf10fffdb6db4b6dce5390b23627c08652b3f6e49aa56ae", size = 1939646, upload-time = "2025-11-01T11:53:25.292Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/f96aefc00f3bbdbab9c0657363ea8437a207d7545ac1c3789673e05d80bd/rapidfuzz-3.14.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1704fc70d214294e554a2421b473779bcdeef715881c5e927dc0f11e1692a0ff", size = 1385512, upload-time = "2025-11-01T11:53:27.594Z" }, + { url = "https://files.pythonhosted.org/packages/26/34/71c4f7749c12ee223dba90017a5947e8f03731a7cc9f489b662a8e9e643d/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc65e72790ddfd310c2c8912b45106e3800fefe160b0c2ef4d6b6fec4e826457", size = 1373571, upload-time = "2025-11-01T11:53:29.096Z" }, + { url = "https://files.pythonhosted.org/packages/32/00/ec8597a64f2be301ce1ee3290d067f49f6a7afb226b67d5f15b56d772ba5/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e38c1305cffae8472572a0584d4ffc2f130865586a81038ca3965301f7c97c", size = 3156759, upload-time = "2025-11-01T11:53:30.777Z" }, + { url = "https://files.pythonhosted.org/packages/61/d5/b41eeb4930501cc899d5a9a7b5c9a33d85a670200d7e81658626dcc0ecc0/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:e195a77d06c03c98b3fc06b8a28576ba824392ce40de8c708f96ce04849a052e", size = 1222067, upload-time = "2025-11-01T11:53:32.334Z" }, + { url = "https://files.pythonhosted.org/packages/2a/7d/6d9abb4ffd1027c6ed837b425834f3bed8344472eb3a503ab55b3407c721/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b7ef2f4b8583a744338a18f12c69693c194fb6777c0e9ada98cd4d9e8f09d10", size = 2394775, upload-time = "2025-11-01T11:53:34.24Z" }, + { url = "https://files.pythonhosted.org/packages/15/ce/4f3ab4c401c5a55364da1ffff8cc879fc97b4e5f4fa96033827da491a973/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a2135b138bcdcb4c3742d417f215ac2d8c2b87bde15b0feede231ae95f09ec41", size = 2526123, upload-time = "2025-11-01T11:53:35.779Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4b/54f804975376a328f57293bd817c12c9036171d15cf7292032e3f5820b2d/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:33a325ed0e8e1aa20c3e75f8ab057a7b248fdea7843c2a19ade0008906c14af0", size = 4262874, upload-time = "2025-11-01T11:53:37.866Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b6/958db27d8a29a50ee6edd45d33debd3ce732e7209183a72f57544cd5fe22/rapidfuzz-3.14.3-cp313-cp313-win32.whl", hash = "sha256:8383b6d0d92f6cd008f3c9216535be215a064b2cc890398a678b56e6d280cb63", size = 1707972, upload-time = "2025-11-01T11:53:39.442Z" }, + { url = "https://files.pythonhosted.org/packages/07/75/fde1f334b0cec15b5946d9f84d73250fbfcc73c236b4bc1b25129d90876b/rapidfuzz-3.14.3-cp313-cp313-win_amd64.whl", hash = "sha256:e6b5e3036976f0fde888687d91be86d81f9ac5f7b02e218913c38285b756be6c", size = 1537011, upload-time = "2025-11-01T11:53:40.92Z" }, + { url = "https://files.pythonhosted.org/packages/2e/d7/d83fe001ce599dc7ead57ba1debf923dc961b6bdce522b741e6b8c82f55c/rapidfuzz-3.14.3-cp313-cp313-win_arm64.whl", hash = "sha256:7ba009977601d8b0828bfac9a110b195b3e4e79b350dcfa48c11269a9f1918a0", size = 810744, upload-time = "2025-11-01T11:53:42.723Z" }, + { url = "https://files.pythonhosted.org/packages/92/13/a486369e63ff3c1a58444d16b15c5feb943edd0e6c28a1d7d67cb8946b8f/rapidfuzz-3.14.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0a28add871425c2fe94358c6300bbeb0bc2ed828ca003420ac6825408f5a424", size = 1967702, upload-time = "2025-11-01T11:53:44.554Z" }, + { url = "https://files.pythonhosted.org/packages/f1/82/efad25e260b7810f01d6b69122685e355bed78c94a12784bac4e0beb2afb/rapidfuzz-3.14.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:010e12e2411a4854b0434f920e72b717c43f8ec48d57e7affe5c42ecfa05dd0e", size = 1410702, upload-time = "2025-11-01T11:53:46.066Z" }, + { url = "https://files.pythonhosted.org/packages/ba/1a/34c977b860cde91082eae4a97ae503f43e0d84d4af301d857679b66f9869/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cfc3d57abd83c734d1714ec39c88a34dd69c85474918ebc21296f1e61eb5ca8", size = 1382337, upload-time = "2025-11-01T11:53:47.62Z" }, + { url = "https://files.pythonhosted.org/packages/88/74/f50ea0e24a5880a9159e8fd256b84d8f4634c2f6b4f98028bdd31891d907/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89acb8cbb52904f763e5ac238083b9fc193bed8d1f03c80568b20e4cef43a519", size = 3165563, upload-time = "2025-11-01T11:53:49.216Z" }, + { url = "https://files.pythonhosted.org/packages/e8/7a/e744359404d7737049c26099423fc54bcbf303de5d870d07d2fb1410f567/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_31_armv7l.whl", hash = "sha256:7d9af908c2f371bfb9c985bd134e295038e3031e666e4b2ade1e7cb7f5af2f1a", size = 1214727, upload-time = "2025-11-01T11:53:50.883Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2e/87adfe14ce75768ec6c2b8acd0e05e85e84be4be5e3d283cdae360afc4fe/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1f1925619627f8798f8c3a391d81071336942e5fe8467bc3c567f982e7ce2897", size = 2403349, upload-time = "2025-11-01T11:53:52.322Z" }, + { url = "https://files.pythonhosted.org/packages/70/17/6c0b2b2bff9c8b12e12624c07aa22e922b0c72a490f180fa9183d1ef2c75/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:152555187360978119e98ce3e8263d70dd0c40c7541193fc302e9b7125cf8f58", size = 2507596, upload-time = "2025-11-01T11:53:53.835Z" }, + { url = "https://files.pythonhosted.org/packages/c3/d1/87852a7cbe4da7b962174c749a47433881a63a817d04f3e385ea9babcd9e/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:52619d25a09546b8db078981ca88939d72caa6b8701edd8b22e16482a38e799f", size = 4273595, upload-time = "2025-11-01T11:53:55.961Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/1d0354b7d1771a28fa7fe089bc23acec2bdd3756efa2419f463e3ed80e16/rapidfuzz-3.14.3-cp313-cp313t-win32.whl", hash = "sha256:489ce98a895c98cad284f0a47960c3e264c724cb4cfd47a1430fa091c0c25204", size = 1757773, upload-time = "2025-11-01T11:53:57.628Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0c/71ef356adc29e2bdf74cd284317b34a16b80258fa0e7e242dd92cc1e6d10/rapidfuzz-3.14.3-cp313-cp313t-win_amd64.whl", hash = "sha256:656e52b054d5b5c2524169240e50cfa080b04b1c613c5f90a2465e84888d6f15", size = 1576797, upload-time = "2025-11-01T11:53:59.455Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d2/0e64fc27bb08d4304aa3d11154eb5480bcf5d62d60140a7ee984dc07468a/rapidfuzz-3.14.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c7e40c0a0af02ad6e57e89f62bef8604f55a04ecae90b0ceeda591bbf5923317", size = 829940, upload-time = "2025-11-01T11:54:01.1Z" }, ] [[package]] @@ -3636,8 +2742,7 @@ dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, { name = "idna" }, - { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } wheels = [ @@ -3651,8 +2756,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, { name = "requests" }, - { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/95/89c054ad70bfef6da605338b009b2e283485835351a9935c7bfbfaca7ffc/responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4", size = 79320, upload-time = "2025-08-08T19:01:46.709Z" } wheels = [ @@ -3697,8 +2801,7 @@ name = "rich" version = "14.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "markdown-it-py" }, { name = "pygments" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } @@ -3712,35 +2815,6 @@ version = "0.27.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = 
"sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, - { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, - { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, - { url = "https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, - { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, - { url = "https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, - { url = "https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, - { url = "https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, - { url = "https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, - { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, - { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, - { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, - { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, - { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, - { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, - { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, - { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, - { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, - { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, - { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, - { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, - { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, - { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, @@ -3785,58 +2859,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, - { url = "https://files.pythonhosted.org/packages/7f/6c/252e83e1ce7583c81f26d1d884b2074d40a13977e1b6c9c50bbf9a7f1f5a/rpds_py-0.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c918c65ec2e42c2a78d19f18c553d77319119bf43aa9e2edf7fb78d624355527", size = 372140, upload-time = "2025-08-27T12:15:05.441Z" }, - { url = "https://files.pythonhosted.org/packages/9d/71/949c195d927c5aeb0d0629d329a20de43a64c423a6aa53836290609ef7ec/rpds_py-0.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fea2b1a922c47c51fd07d656324531adc787e415c8b116530a1d29c0516c62d", size = 354086, upload-time = "2025-08-27T12:15:07.404Z" }, - { url = "https://files.pythonhosted.org/packages/9f/02/e43e332ad8ce4f6c4342d151a471a7f2900ed1d76901da62eb3762663a71/rpds_py-0.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbf94c58e8e0cd6b6f38d8de67acae41b3a515c26169366ab58bdca4a6883bb8", size = 382117, upload-time = "2025-08-27T12:15:09.275Z" }, - { url = "https://files.pythonhosted.org/packages/d0/05/b0fdeb5b577197ad72812bbdfb72f9a08fa1e64539cc3940b1b781cd3596/rpds_py-0.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2a8fed130ce946d5c585eddc7c8eeef0051f58ac80a8ee43bd17835c144c2cc", size = 394520, upload-time = "2025-08-27T12:15:10.727Z" }, - { url = "https://files.pythonhosted.org/packages/67/1f/4cfef98b2349a7585181e99294fa2a13f0af06902048a5d70f431a66d0b9/rpds_py-0.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:037a2361db72ee98d829bc2c5b7cc55598ae0a5e0ec1823a56ea99374cfd73c1", size = 522657, upload-time = "2025-08-27T12:15:12.613Z" }, - { url = "https://files.pythonhosted.org/packages/44/55/ccf37ddc4c6dce7437b335088b5ca18da864b334890e2fe9aa6ddc3f79a9/rpds_py-0.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5281ed1cc1d49882f9997981c88df1a22e140ab41df19071222f7e5fc4e72125", size = 402967, upload-time = "2025-08-27T12:15:14.113Z" }, - { url = "https://files.pythonhosted.org/packages/74/e5/5903f92e41e293b07707d5bf00ef39a0eb2af7190aff4beaf581a6591510/rpds_py-0.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd50659a069c15eef8aa3d64bbef0d69fd27bb4a50c9ab4f17f83a16cbf8905", size = 384372, upload-time = "2025-08-27T12:15:15.842Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e3/fbb409e18aeefc01e49f5922ac63d2d914328430e295c12183ce56ebf76b/rpds_py-0.27.1-cp39-cp39-manylinux_2_31_riscv64.whl", hash = "sha256:c4b676c4ae3921649a15d28ed10025548e9b561ded473aa413af749503c6737e", size = 401264, upload-time = "2025-08-27T12:15:17.388Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/79/529ad07794e05cb0f38e2f965fc5bb20853d523976719400acecc447ec9d/rpds_py-0.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:079bc583a26db831a985c5257797b2b5d3affb0386e7ff886256762f82113b5e", size = 418691, upload-time = "2025-08-27T12:15:19.144Z" }, - { url = "https://files.pythonhosted.org/packages/33/39/6554a7fd6d9906fda2521c6d52f5d723dca123529fb719a5b5e074c15e01/rpds_py-0.27.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4e44099bd522cba71a2c6b97f68e19f40e7d85399de899d66cdb67b32d7cb786", size = 558989, upload-time = "2025-08-27T12:15:21.087Z" }, - { url = "https://files.pythonhosted.org/packages/19/b2/76fa15173b6f9f445e5ef15120871b945fb8dd9044b6b8c7abe87e938416/rpds_py-0.27.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e202e6d4188e53c6661af813b46c37ca2c45e497fc558bacc1a7630ec2695aec", size = 589835, upload-time = "2025-08-27T12:15:22.696Z" }, - { url = "https://files.pythonhosted.org/packages/ee/9e/5560a4b39bab780405bed8a88ee85b30178061d189558a86003548dea045/rpds_py-0.27.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f41f814b8eaa48768d1bb551591f6ba45f87ac76899453e8ccd41dba1289b04b", size = 555227, upload-time = "2025-08-27T12:15:24.278Z" }, - { url = "https://files.pythonhosted.org/packages/52/d7/cd9c36215111aa65724c132bf709c6f35175973e90b32115dedc4ced09cb/rpds_py-0.27.1-cp39-cp39-win32.whl", hash = "sha256:9e71f5a087ead99563c11fdaceee83ee982fd39cf67601f4fd66cb386336ee52", size = 217899, upload-time = "2025-08-27T12:15:25.926Z" }, - { url = "https://files.pythonhosted.org/packages/5b/e0/d75ab7b4dd8ba777f6b365adbdfc7614bbfe7c5f05703031dfa4b61c3d6c/rpds_py-0.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:71108900c9c3c8590697244b9519017a400d9ba26a36c48381b3f64743a44aab", size = 228725, upload-time = "2025-08-27T12:15:27.398Z" }, - { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, - { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, - { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, - { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, - { url = "https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, upload-time = "2025-08-27T12:15:37.051Z" }, - { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, - { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, - { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, - { url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, - { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" }, - { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, - { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = 
"2025-08-27T12:15:55.158Z" }, - { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, - { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, - { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, - { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, - { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, - { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, - { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, - { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ea/5463cd5048a7a2fcdae308b6e96432802132c141bfb9420260142632a0f1/rpds_py-0.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa8933159edc50be265ed22b401125c9eebff3171f570258854dbce3ecd55475", size = 371778, upload-time = "2025-08-27T12:16:13.851Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/f38c099db07f5114029c1467649d308543906933eebbc226d4527a5f4693/rpds_py-0.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:a50431bf02583e21bf273c71b89d710e7a710ad5e39c725b14e685610555926f", size = 354394, upload-time = "2025-08-27T12:16:15.609Z" }, - { url = "https://files.pythonhosted.org/packages/7d/79/b76f97704d9dd8ddbd76fed4c4048153a847c5d6003afe20a6b5c3339065/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78af06ddc7fe5cc0e967085a9115accee665fb912c22a3f54bad70cc65b05fe6", size = 382348, upload-time = "2025-08-27T12:16:17.251Z" }, - { url = "https://files.pythonhosted.org/packages/8a/3f/ef23d3c1be1b837b648a3016d5bbe7cfe711422ad110b4081c0a90ef5a53/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70d0738ef8fee13c003b100c2fbd667ec4f133468109b3472d249231108283a3", size = 394159, upload-time = "2025-08-27T12:16:19.251Z" }, - { url = "https://files.pythonhosted.org/packages/74/8a/9e62693af1a34fd28b1a190d463d12407bd7cf561748cb4745845d9548d3/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2f6fd8a1cea5bbe599b6e78a6e5ee08db434fc8ffea51ff201c8765679698b3", size = 522775, upload-time = "2025-08-27T12:16:20.929Z" }, - { url = "https://files.pythonhosted.org/packages/36/0d/8d5bb122bf7a60976b54c5c99a739a3819f49f02d69df3ea2ca2aff47d5c/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8177002868d1426305bb5de1e138161c2ec9eb2d939be38291d7c431c4712df8", size = 402633, upload-time = "2025-08-27T12:16:22.548Z" }, - { url = "https://files.pythonhosted.org/packages/0f/0e/237948c1f425e23e0cf5a566d702652a6e55c6f8fbd332a1792eb7043daf/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:008b839781d6c9bf3b6a8984d1d8e56f0ec46dc56df61fd669c49b58ae800400", size = 384867, upload-time = "2025-08-27T12:16:24.29Z" }, - { url = "https://files.pythonhosted.org/packages/d6/0a/da0813efcd998d260cbe876d97f55b0f469ada8ba9cbc47490a132554540/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:a55b9132bb1ade6c734ddd2759c8dc132aa63687d259e725221f106b83a0e485", size = 401791, upload-time = "2025-08-27T12:16:25.954Z" }, - { url = "https://files.pythonhosted.org/packages/51/78/c6c9e8a8aaca416a6f0d1b6b4a6ee35b88fe2c5401d02235d0a056eceed2/rpds_py-0.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a46fdec0083a26415f11d5f236b79fa1291c32aaa4a17684d82f7017a1f818b1", size = 419525, upload-time = "2025-08-27T12:16:27.659Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/5af37e1d71487cf6d56dd1420dc7e0c2732c1b6ff612aa7a88374061c0a8/rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8a63b640a7845f2bdd232eb0d0a4a2dd939bcdd6c57e6bb134526487f3160ec5", size = 559255, upload-time = "2025-08-27T12:16:29.343Z" }, - { url = "https://files.pythonhosted.org/packages/40/7f/8b7b136069ef7ac3960eda25d832639bdb163018a34c960ed042dd1707c8/rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:7e32721e5d4922deaaf963469d795d5bde6093207c52fec719bd22e5d1bedbc4", size = 590384, upload-time = "2025-08-27T12:16:31.005Z" }, - { url = "https://files.pythonhosted.org/packages/d8/06/c316d3f6ff03f43ccb0eba7de61376f8ec4ea850067dddfafe98274ae13c/rpds_py-0.27.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2c426b99a068601b5f4623573df7a7c3d72e87533a2dd2253353a03e7502566c", size = 555959, upload-time = "2025-08-27T12:16:32.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/94/384cf54c430b9dac742bbd2ec26c23feb78ded0d43d6d78563a281aec017/rpds_py-0.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fc9b7fe29478824361ead6e14e4f5aed570d477e06088826537e202d25fe859", size = 228784, upload-time = "2025-08-27T12:16:34.428Z" }, ] [[package]] @@ -3891,6 +2913,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724", size = 85308, upload-time = "2025-07-18T19:22:40.947Z" }, ] +[[package]] +name = "scipy" +version = "1.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, + { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, + { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, + { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, + { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, + { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, + { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, + { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, + { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, + { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, + { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, + { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, + { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, + { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, + { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, + { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, + { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = 
"2025-10-28T17:35:51.076Z" }, + { url = "https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, + { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, + { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, + { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, +] + [[package]] name = "send2trash" version = "1.8.3" @@ -3932,7 
+2995,7 @@ name = "slack-bolt" version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "slack-sdk", marker = "python_full_version >= '3.10'" }, + { name = "slack-sdk" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c3/14/0f490731fbfc95b5711e8124b30bb6e2a4be5edad22256891adad66f8b79/slack_bolt-1.26.0.tar.gz", hash = "sha256:b0b806b9dcf009ee50172830c1d170e231cd873c5b819703bbcdc59a0fe5ff3e", size = 129915, upload-time = "2025-10-06T23:41:51.708Z" } wheels = [ @@ -3971,7 +3034,7 @@ name = "sse-starlette" version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "python_full_version >= '3.10'" }, + { name = "anyio" }, ] sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } wheels = [ @@ -3997,60 +3060,81 @@ name = "starlette" version = "0.48.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10' and python_full_version < '3.13'" }, + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, ] +[[package]] +name = "stickler-eval" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonschema" }, + { name = "munkres" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "rapidfuzz" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/7d/001bffe60e417315c470e77d7d0832f3fce9565f9f491450a3d3497df2df/stickler_eval-0.1.2.tar.gz", hash = "sha256:a9bded773c0c4d8ee08ba03b422ee4655c6a7292f572264f2cd74a565fdbc3f8", size = 132677, upload-time = "2025-11-06T20:07:41.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/cf/e37a0731e2a1e4c48b5dbf4dd7b1f8d421367ed9a8fb09a1f78ccdebaaaf/stickler_eval-0.1.2-py3-none-any.whl", hash = "sha256:1631bb40cf625e473c22e92158e927412b59bb1f267c06ba47750bac71cc8483", size = 161224, upload-time = "2025-11-06T20:07:40.556Z" }, +] + [[package]] name = "strands-agents" -version = "1.12.0" +version = "1.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "boto3", marker = "python_full_version >= '3.10'" }, - { name = "botocore", marker = "python_full_version >= '3.10'" }, - { name = "docstring-parser", marker = "python_full_version >= '3.10'" }, - { name = "mcp", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-instrumentation-threading", marker = "python_full_version >= '3.10'" }, - { 
name = "opentelemetry-sdk", marker = "python_full_version >= '3.10'" }, - { name = "pydantic", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, - { name = "watchdog", marker = "python_full_version >= '3.10'" }, + { name = "boto3" }, + { name = "botocore" }, + { name = "docstring-parser" }, + { name = "jsonschema" }, + { name = "mcp" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation-threading" }, + { name = "opentelemetry-sdk" }, + { name = "pydantic" }, + { name = "typing-extensions" }, + { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/56/3d3cb9bf62d45f97befe82fbb73ad65b46e9a6efd21151c38c466cd87c11/strands_agents-1.12.0.tar.gz", hash = "sha256:8f17e775971505ab7841a3139cde9879632a26cdd9cd55be74de83f0e7f804c0", size = 418141, upload-time = "2025-10-10T15:16:45.753Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/dd/a2dc96614bb1dd7c1623cbdf6df268eb307038b2fe27bc5a6148f4223f59/strands_agents-1.14.0.tar.gz", hash = "sha256:f86dd2b92d50196acd0c5ff5404fcd1b6c3715ae56fcceb2a78210ab47860585", size = 471216, upload-time = "2025-10-29T14:20:27.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/05/2f0fbce4a3acd7b9c042368bbe8038409a7b30d65138bd3b37a06d1a4cc4/strands_agents-1.12.0-py3-none-any.whl", hash = "sha256:af0f9c8a175666009863d0fb4438e71000ea3a2f0cbda3dc308c35dd4f9a1eb0", size = 216043, upload-time = "2025-10-10T15:16:44.043Z" }, + { url = "https://files.pythonhosted.org/packages/8f/08/848c26d917d8f75bc10edeea63578af218df5c6efeb69e3cf91eb8fd396e/strands_agents-1.14.0-py3-none-any.whl", hash = "sha256:d2ebc1b991c37e891cfe79cf5dea1bfa021c3e6d93e4d2f238042ea688019bf3", size = 238974, upload-time = "2025-10-29T14:20:24.841Z" }, ] [[package]] name = "strands-agents-tools" -version = "0.2.11" +version = "0.2.13" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "aiohttp", marker = "python_full_version >= '3.10'" }, - { name = "aws-requests-auth", marker = "python_full_version >= '3.10'" }, - { name = "botocore", marker = "python_full_version >= '3.10'" }, - { name = "dill", marker = "python_full_version >= '3.10'" }, - { name = "markdownify", marker = "python_full_version >= '3.10'" }, - { name = "pillow", marker = "python_full_version >= '3.10'" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.10'" }, - { name = "pyjwt", marker = "python_full_version >= '3.10'" }, - { name = "requests", marker = "python_full_version >= '3.10'" }, - { name = "rich", marker = "python_full_version >= '3.10'" }, - { name = "slack-bolt", marker = "python_full_version >= '3.10'" }, - { name = "strands-agents", marker = "python_full_version >= '3.10'" }, - { name = "sympy", marker = "python_full_version >= '3.10'" }, - { name = "tenacity", marker = "python_full_version >= '3.10'" }, - { name = "tzdata", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, - { name = "watchdog", marker = "python_full_version >= '3.10'" }, + { name = "aiohttp" }, + { name = "aws-requests-auth" }, + { name = "botocore" }, + { name = "dill" }, + { name = "markdownify" }, + { name = "pillow" }, + { name = "prompt-toolkit" }, + { name = "pyjwt" }, + { name = "requests" }, + { name = "rich" }, + { name = "slack-bolt" }, + { name = "strands-agents" }, + { name = "sympy" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, + { name = "watchdog" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/37/a2/5cd71b9db26c98d6289883fe969e884964fbd2a4b78cb75073d6651f0041/strands_agents_tools-0.2.11.tar.gz", hash = "sha256:5ef192b68eddeccb96c47227ca841ccce3aedff5db0953a0af7b7212a09428df", size = 445792, upload-time = "2025-10-10T16:58:26.396Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/d3/50aa7bb388056d814017add5489b68231c25924efce1079bd5b91baf297b/strands_agents_tools-0.2.13.tar.gz", hash = "sha256:31050f6dde57fd6524709c021dcb25946685746bcc5e000112db94b279a2a937", size = 451115, upload-time = "2025-10-29T14:22:46.781Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/1c/a903b4947e3e0e56c2a1db7008c286c399b2e37c6362c569e8a07006e046/strands_agents_tools-0.2.11-py3-none-any.whl", hash = "sha256:ebff41ba782e1ce59530e11321780eae0ffdb5b61e7aee7408c46c1a8f29f18d", size = 297958, upload-time = "2025-10-10T16:58:24.213Z" }, + { url = "https://files.pythonhosted.org/packages/43/af/3f42cfb73761817ecd81c04405e6789b4c0b3a24daeb7834952d22b0540d/strands_agents_tools-0.2.13-py3-none-any.whl", hash = "sha256:ddc7f95f1e52ce819039f2961b8428f901b9597bdbb1498793950350672965f3", size = 299642, upload-time = "2025-10-29T14:22:44.196Z" }, ] [[package]] @@ -4058,7 +3142,7 @@ name = "sympy" version = "1.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "mpmath", marker = "python_full_version >= '3.10'" }, + { name = "mpmath" }, ] sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } wheels = [ @@ -4109,39 +3193,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, ] -[[package]] -name = "tomli" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, -] - [[package]] name = "tornado" version = "6.5.2" @@ -4175,7 +3226,6 @@ name = "typeguard" version = "4.4.4" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = "2025-06-18T09:56:07.624Z" } @@ -4188,8 +3238,7 @@ name = "typer" version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "click" }, { name = "rich" }, { name = "shellingham" }, { name = "typing-extensions" }, @@ -4247,27 +3296,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" }, ] -[[package]] -name = "urllib3" -version = "1.26.20" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, -] - [[package]] name = "urllib3" version = "2.5.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", -] sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, @@ -4278,9 +3310,8 @@ name = "uvicorn" version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "h11", marker = "python_full_version >= '3.10'" }, - { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, + { name = "click" }, + { name = "h11" }, ] sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = 
"2025-09-23T13:33:47.486Z" } wheels = [ @@ -4293,25 +3324,12 @@ version = "6.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/56/90994d789c61df619bfc5ce2ecdabd5eeff564e1eb47512bd01b5e019569/watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26", size = 96390, upload-time = "2024-11-01T14:06:24.793Z" }, - { url = "https://files.pythonhosted.org/packages/55/46/9a67ee697342ddf3c6daa97e3a587a56d6c4052f881ed926a849fcf7371c/watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112", size = 88389, upload-time = "2024-11-01T14:06:27.112Z" }, - { url = "https://files.pythonhosted.org/packages/44/65/91b0985747c52064d8701e1075eb96f8c40a79df889e59a399453adfb882/watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3", size = 89020, upload-time = "2024-11-01T14:06:29.876Z" }, - { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, - { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, { url = 
"https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, - { url = "https://files.pythonhosted.org/packages/05/52/7223011bb760fce8ddc53416beb65b83a3ea6d7d13738dde75eeb2c89679/watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8", size = 96390, upload-time = "2024-11-01T14:06:49.325Z" }, - { url = "https://files.pythonhosted.org/packages/9c/62/d2b21bc4e706d3a9d467561f487c2938cbd881c69f3808c43ac1ec242391/watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a", size = 88386, upload-time = "2024-11-01T14:06:50.536Z" }, - { url = "https://files.pythonhosted.org/packages/ea/22/1c90b20eda9f4132e4603a26296108728a8bfe9584b006bd05dd94548853/watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c", size = 89017, upload-time = "2024-11-01T14:06:51.717Z" }, - { url = "https://files.pythonhosted.org/packages/30/ad/d17b5d42e28a8b91f8ed01cb949da092827afb9995d4559fd448d0472763/watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881", size = 87902, upload-time = "2024-11-01T14:06:53.119Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ca/c3649991d140ff6ab67bfc85ab42b165ead119c9e12211e08089d763ece5/watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11", size = 88380, upload-time = "2024-11-01T14:06:55.19Z" }, - { url = "https://files.pythonhosted.org/packages/5b/79/69f2b0e8d3f2afd462029031baafb1b75d11bb62703f0e1022b2e54d49ee/watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa", size = 87903, upload-time = "2024-11-01T14:06:57.052Z" }, - { url = "https://files.pythonhosted.org/packages/e2/2b/dc048dd71c2e5f0f7ebc04dd7912981ec45793a03c0dc462438e0591ba5d/watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e", size = 88381, upload-time = "2024-11-01T14:06:58.193Z" }, { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, { url = 
"https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, @@ -4387,26 +3405,6 @@ version = "1.17.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04", size = 53482, upload-time = "2025-08-12T05:51:44.467Z" }, - { url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2", size = 38676, upload-time = "2025-08-12T05:51:32.636Z" }, - { url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c", size = 38957, upload-time = "2025-08-12T05:51:54.655Z" }, - { url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775", size = 81975, upload-time = "2025-08-12T05:52:30.109Z" }, - { url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd", size = 83149, upload-time = "2025-08-12T05:52:09.316Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05", size = 82209, upload-time = "2025-08-12T05:52:10.331Z" }, - { url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418", size = 81551, upload-time = "2025-08-12T05:52:31.137Z" }, - { url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390", size = 36464, upload-time = "2025-08-12T05:53:01.204Z" }, - { url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6", size = 38748, upload-time = "2025-08-12T05:53:00.209Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18", size = 36810, upload-time = "2025-08-12T05:52:51.906Z" }, - { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, - { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, - { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, - { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, - { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, - { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, - { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, - { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, { url = 
"https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, @@ -4427,16 +3425,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, - { url = "https://files.pythonhosted.org/packages/41/be/be9b3b0a461ee3e30278706f3f3759b9b69afeedef7fe686036286c04ac6/wrapt-1.17.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc", size = 53485, upload-time = "2025-08-12T05:51:53.11Z" }, - { url = "https://files.pythonhosted.org/packages/b3/a8/8f61d6b8f526efc8c10e12bf80b4206099fea78ade70427846a37bc9cbea/wrapt-1.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9", size = 38675, upload-time = "2025-08-12T05:51:42.885Z" }, - { url = "https://files.pythonhosted.org/packages/48/f1/23950c29a25637b74b322f9e425a17cc01a478f6afb35138ecb697f9558d/wrapt-1.17.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d", size = 38956, upload-time = "2025-08-12T05:52:03.149Z" }, - { url = "https://files.pythonhosted.org/packages/43/46/dd0791943613885f62619f18ee6107e6133237a6b6ed8a9ecfac339d0b4f/wrapt-1.17.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a", size = 81745, upload-time = "2025-08-12T05:52:49.62Z" }, - { url = "https://files.pythonhosted.org/packages/dd/ec/bb2d19bd1a614cc4f438abac13ae26c57186197920432d2a915183b15a8b/wrapt-1.17.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139", size = 82833, upload-time = "2025-08-12T05:52:27.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/eb/66579aea6ad36f07617fedca8e282e49c7c9bab64c63b446cfe4f7f47a49/wrapt-1.17.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df", size = 81889, upload-time = "2025-08-12T05:52:29.023Z" }, - { url = "https://files.pythonhosted.org/packages/04/9c/a56b5ac0e2473bdc3fb11b22dd69ff423154d63861cf77911cdde5e38fd2/wrapt-1.17.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b", size = 81344, upload-time = "2025-08-12T05:52:50.869Z" }, - { url = "https://files.pythonhosted.org/packages/93/4c/9bd735c42641d81cb58d7bfb142c58f95c833962d15113026705add41a07/wrapt-1.17.3-cp39-cp39-win32.whl", hash = "sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81", size = 36462, upload-time = "2025-08-12T05:53:19.623Z" }, - { url = "https://files.pythonhosted.org/packages/f0/ea/0b72f29cb5ebc16eb55c57dc0c98e5de76fc97f435fd407f7d409459c0a6/wrapt-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f", size = 38740, upload-time = "2025-08-12T05:53:18.271Z" }, - { url = "https://files.pythonhosted.org/packages/c3/8b/9eae65fb92321e38dbfec7719b87d840a4b92fde83fd1bbf238c5488d055/wrapt-1.17.3-cp39-cp39-win_arm64.whl", hash = "sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f", size = 36806, upload-time = "2025-08-12T05:52:58.765Z" }, { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] @@ -4469,38 +3457,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/43/a2204825342f37c337f5edb6637040fa14e365b2fcc2346960201d457579/yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e", size = 140517, upload-time = "2025-10-06T14:08:42.494Z" }, - { url = "https://files.pythonhosted.org/packages/44/6f/674f3e6f02266428c56f704cd2501c22f78e8b2eeb23f153117cc86fb28a/yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f", size = 93495, upload-time = "2025-10-06T14:08:46.2Z" }, - { url = "https://files.pythonhosted.org/packages/b8/12/5b274d8a0f30c07b91b2f02cba69152600b47830fcfb465c108880fcee9c/yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf", size = 94400, upload-time = "2025-10-06T14:08:47.855Z" }, - { url = "https://files.pythonhosted.org/packages/e2/7f/df1b6949b1fa1aa9ff6de6e2631876ad4b73c4437822026e85d8acb56bb1/yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a", size = 347545, upload-time = "2025-10-06T14:08:49.683Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/09/f92ed93bd6cd77872ab6c3462df45ca45cd058d8f1d0c9b4f54c1704429f/yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c", size = 319598, upload-time = "2025-10-06T14:08:51.215Z" }, - { url = "https://files.pythonhosted.org/packages/c3/97/ac3f3feae7d522cf7ccec3d340bb0b2b61c56cb9767923df62a135092c6b/yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147", size = 363893, upload-time = "2025-10-06T14:08:53.144Z" }, - { url = "https://files.pythonhosted.org/packages/06/49/f3219097403b9c84a4d079b1d7bda62dd9b86d0d6e4428c02d46ab2c77fc/yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb", size = 371240, upload-time = "2025-10-06T14:08:55.036Z" }, - { url = "https://files.pythonhosted.org/packages/35/9f/06b765d45c0e44e8ecf0fe15c9eacbbde342bb5b7561c46944f107bfb6c3/yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6", size = 346965, upload-time = "2025-10-06T14:08:56.722Z" }, - { url = "https://files.pythonhosted.org/packages/c5/69/599e7cea8d0fcb1694323b0db0dda317fa3162f7b90166faddecf532166f/yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0", size = 342026, upload-time = "2025-10-06T14:08:58.563Z" }, - { url = "https://files.pythonhosted.org/packages/95/6f/9dfd12c8bc90fea9eab39832ee32ea48f8e53d1256252a77b710c065c89f/yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda", size = 335637, upload-time = "2025-10-06T14:09:00.506Z" }, - { url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc", size = 359082, upload-time = "2025-10-06T14:09:01.936Z" }, - { url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737", size = 357811, upload-time = "2025-10-06T14:09:03.445Z" }, - { url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467", size = 351223, upload-time = "2025-10-06T14:09:05.401Z" }, - { url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea", size = 82118, upload-time = "2025-10-06T14:09:11.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca", size = 86852, upload-time = "2025-10-06T14:09:12.958Z" }, - { 
url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b", size = 82012, upload-time = "2025-10-06T14:09:14.664Z" }, - { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, - { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, - { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, - { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, - { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, - { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, - { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, - { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", 
size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, - { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, - { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, - { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, - { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, - { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, @@ -4549,22 +3505,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, { url = 
"https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, - { url = "https://files.pythonhosted.org/packages/94/fd/6480106702a79bcceda5fd9c63cb19a04a6506bd5ce7fd8d9b63742f0021/yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748", size = 141301, upload-time = "2025-10-06T14:12:19.01Z" }, - { url = "https://files.pythonhosted.org/packages/42/e1/6d95d21b17a93e793e4ec420a925fe1f6a9342338ca7a563ed21129c0990/yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859", size = 93864, upload-time = "2025-10-06T14:12:21.05Z" }, - { url = "https://files.pythonhosted.org/packages/32/58/b8055273c203968e89808413ea4c984988b6649baabf10f4522e67c22d2f/yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9", size = 94706, upload-time = "2025-10-06T14:12:23.287Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/d7bfbc28a88c2895ecd0da6a874def0c147de78afc52c773c28e1aa233a3/yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054", size = 347100, upload-time = "2025-10-06T14:12:28.527Z" }, - { url = "https://files.pythonhosted.org/packages/bd/e8/37a1e7b99721c0564b1fc7b0a4d1f595ef6fb8060d82ca61775b644185f7/yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b", size = 318902, upload-time = "2025-10-06T14:12:30.528Z" }, - { url = "https://files.pythonhosted.org/packages/1c/ef/34724449d7ef2db4f22df644f2dac0b8a275d20f585e526937b3ae47b02d/yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60", size = 363302, upload-time = "2025-10-06T14:12:32.295Z" }, - { url = "https://files.pythonhosted.org/packages/8a/04/88a39a5dad39889f192cce8d66cc4c58dbeca983e83f9b6bf23822a7ed91/yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890", size = 370816, upload-time = "2025-10-06T14:12:34.01Z" }, - { url = "https://files.pythonhosted.org/packages/6b/1f/5e895e547129413f56c76be2c3ce4b96c797d2d0ff3e16a817d9269b12e6/yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba", size = 346465, upload-time = "2025-10-06T14:12:35.977Z" }, - { url = "https://files.pythonhosted.org/packages/11/13/a750e9fd6f9cc9ed3a52a70fe58ffe505322f0efe0d48e1fd9ffe53281f5/yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca", size = 341506, upload-time = "2025-10-06T14:12:37.788Z" }, - { url = "https://files.pythonhosted.org/packages/3c/67/bb6024de76e7186611ebe626aec5b71a2d2ecf9453e795f2dbd80614784c/yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba", size = 335030, upload-time = 
"2025-10-06T14:12:39.775Z" }, - { url = "https://files.pythonhosted.org/packages/a2/be/50b38447fd94a7992996a62b8b463d0579323fcfc08c61bdba949eef8a5d/yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b", size = 358560, upload-time = "2025-10-06T14:12:41.547Z" }, - { url = "https://files.pythonhosted.org/packages/e2/89/c020b6f547578c4e3dbb6335bf918f26e2f34ad0d1e515d72fd33ac0c635/yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e", size = 357290, upload-time = "2025-10-06T14:12:43.861Z" }, - { url = "https://files.pythonhosted.org/packages/8c/52/c49a619ee35a402fa3a7019a4fa8d26878fec0d1243f6968bbf516789578/yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8", size = 350700, upload-time = "2025-10-06T14:12:46.868Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c9/f5042d87777bf6968435f04a2bbb15466b2f142e6e47fa4f34d1a3f32f0c/yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b", size = 82323, upload-time = "2025-10-06T14:12:48.633Z" }, - { url = "https://files.pythonhosted.org/packages/fd/58/d00f7cad9eba20c4eefac2682f34661d1d1b3a942fc0092eb60e78cfb733/yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed", size = 87145, upload-time = "2025-10-06T14:12:50.241Z" }, - { url = "https://files.pythonhosted.org/packages/c2/a3/70904f365080780d38b919edd42d224b8c4ce224a86950d2eaa2a24366ad/yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2", size = 82173, upload-time = "2025-10-06T14:12:51.869Z" }, { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] diff --git a/patterns/pattern-2/src/assessment_function/index.py b/patterns/pattern-2/src/assessment_function/index.py index 59ec9004..e1ea12bd 100644 --- a/patterns/pattern-2/src/assessment_function/index.py +++ b/patterns/pattern-2/src/assessment_function/index.py @@ -10,88 +10,99 @@ from idp_common.models import Document, Status from idp_common.docs_service import create_document_service from idp_common import s3 -from idp_common.utils import normalize_boolean_value, calculate_lambda_metering, merge_metering_data +from idp_common.utils import ( + normalize_boolean_value, + calculate_lambda_metering, + merge_metering_data, +) from assessment_validator import AssessmentValidator from aws_xray_sdk.core import xray_recorder, patch_all patch_all() + # Custom exception for throttling scenarios class ThrottlingException(Exception): """Exception raised when throttling is detected in document processing results""" + pass + # Throttling detection constants THROTTLING_KEYWORDS = [ "throttlingexception", - "provisionedthroughputexceededexception", + "provisionedthroughputexceededexception", "servicequotaexceededexception", "toomanyrequestsexception", "requestlimitexceeded", "too many tokens", "please wait before trying again", - "reached max retries" + "reached max retries", ] THROTTLING_EXCEPTIONS = [ "ThrottlingException", "ProvisionedThroughputExceededException", - "ServiceQuotaExceededException", + 
"ServiceQuotaExceededException", "TooManyRequestsException", - "RequestLimitExceeded" + "RequestLimitExceeded", ] # Configuration will be loaded in handler function logger = logging.getLogger() logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) -logging.getLogger('idp_common.bedrock.client').setLevel(os.environ.get("BEDROCK_LOG_LEVEL", "INFO")) +logging.getLogger("idp_common.bedrock.client").setLevel( + os.environ.get("BEDROCK_LOG_LEVEL", "INFO") +) + def is_throttling_exception(exception): """ Check if an exception is related to throttling. - + Args: exception: The exception to check - + Returns: bool: True if the exception is throttling-related, False otherwise """ from botocore.exceptions import ClientError - + if isinstance(exception, ClientError): - error_code = exception.response.get('Error', {}).get('Code', '') + error_code = exception.response.get("Error", {}).get("Code", "") return error_code in THROTTLING_EXCEPTIONS - + exception_name = type(exception).__name__ exception_message = str(exception).lower() - - return ( - exception_name in THROTTLING_EXCEPTIONS or - any(keyword in exception_message for keyword in THROTTLING_KEYWORDS) + + return exception_name in THROTTLING_EXCEPTIONS or any( + keyword in exception_message for keyword in THROTTLING_KEYWORDS ) + def check_document_for_throttling_errors(document): """ Check if a document has throttling errors in its errors field. - + Args: document: The document object to check - + Returns: tuple: (has_throttling_errors: bool, first_throttling_error: str or None) """ if document.status != Status.FAILED or not document.errors: return False, None - + for error_msg in document.errors: error_lower = str(error_msg).lower() if any(keyword in error_lower for keyword in THROTTLING_KEYWORDS): return True, error_msg - + return False, None -@xray_recorder.capture('assessment_function') + +@xray_recorder.capture("assessment_function") def handler(event, context): """ Lambda handler for document assessment. @@ -99,32 +110,36 @@ def handler(event, context): using the Assessment service from the idp_common library. 
""" start_time = time.time() # Capture start time for Lambda metering - logger.info(f"Starting assessment processing for event: {json.dumps(event, default=str)}") + logger.info( + f"Starting assessment processing for event: {json.dumps(event, default=str)}" + ) # Load configuration config = get_config(as_model=True) # Use default=str to handle Decimal and other non-serializable types logger.info(f"Config: {json.dumps(config.model_dump(), default=str)}") - + # Extract input from event - handle both compressed and uncompressed - document_data = event.get('document', {}) - section_id = event.get('section_id') - + document_data = event.get("document", {}) + section_id = event.get("section_id") + # Validate inputs if not document_data: raise ValueError("No document provided in event") - + if not section_id: raise ValueError("No section_id provided in event") - + # Convert document data to Document object - handle compression - working_bucket = os.environ.get('WORKING_BUCKET') + working_bucket = os.environ.get("WORKING_BUCKET") document = Document.load_document(document_data, working_bucket, logger) - logger.info(f"Processing assessment for document {document.id}, section {section_id}") + logger.info( + f"Processing assessment for document {document.id}, section {section_id}" + ) # X-Ray annotations - xray_recorder.put_annotation('document_id', {document.id}) - xray_recorder.put_annotation('processing_stage', 'assessment') + xray_recorder.put_annotation("document_id", {document.id}) + xray_recorder.put_annotation("processing_stage", "assessment") # Find the section we're processing section = None @@ -132,24 +147,32 @@ def handler(event, context): if s.section_id == section_id: section = s break - + if not section: raise ValueError(f"Section {section_id} not found in document") # Check if granular assessment is enabled (moved earlier for Lambda metering context) - assessment_context = "GranularAssessment" if config.assessment.granular.enabled else "Assessment" - logger.info(f"Assessment mode: {'Granular' if config.assessment.granular.enabled else 'Regular'} (context: {assessment_context})") + assessment_context = ( + "GranularAssessment" if config.assessment.granular.enabled else "Assessment" + ) + logger.info( + f"Assessment mode: {'Granular' if config.assessment.granular.enabled else 'Regular'} (context: {assessment_context})" + ) # Intelligent Assessment Skip: Check if extraction results already contain explainability_info if section.extraction_result_uri and section.extraction_result_uri.strip(): try: - logger.info(f"Checking extraction results for existing assessment: {section.extraction_result_uri}") + logger.info( + f"Checking extraction results for existing assessment: {section.extraction_result_uri}" + ) extraction_data = s3.get_json_content(section.extraction_result_uri) - + # If explainability_info exists, assessment was already done - if extraction_data.get('explainability_info'): - logger.info(f"Skipping assessment for section {section_id} - extraction results already contain explainability_info") - + if extraction_data.get("explainability_info"): + logger.info( + f"Skipping assessment for section {section_id} - extraction results already contain explainability_info" + ) + # Create section-specific document (same as normal processing) to match output format section_document = Document( id=document.id, @@ -168,43 +191,57 @@ def handler(event, context): evaluation_report_uri=document.evaluation_report_uri, evaluation_results_uri=document.evaluation_results_uri, errors=document.errors, 
- metering={} # Empty metering for skipped processing + metering={}, # Empty metering for skipped processing ) - + # Add only the pages needed for this section for page_id in section.page_ids: if page_id in document.pages: section_document.pages[page_id] = document.pages[page_id] - + # Add only the section being processed (preserve existing data) section_document.sections = [section] - + # Add Lambda metering for assessment skip execution with dynamic context try: - lambda_metering = calculate_lambda_metering(assessment_context, context, start_time) - section_document.metering = merge_metering_data(section_document.metering, lambda_metering) + lambda_metering = calculate_lambda_metering( + assessment_context, context, start_time + ) + section_document.metering = merge_metering_data( + section_document.metering, lambda_metering + ) except Exception as e: - logger.warning(f"Failed to add Lambda metering for assessment skip: {str(e)}") - + logger.warning( + f"Failed to add Lambda metering for assessment skip: {str(e)}" + ) + # Return consistent format for Map state collation response = { - "section_id": section_id, - "document": section_document.serialize_document(working_bucket, f"assessment_skip_{section_id}", logger) + "section_id": section_id, + "document": section_document.serialize_document( + working_bucket, f"assessment_skip_{section_id}", logger + ), } - - logger.info(f"Assessment skipped - Response: {json.dumps(response, default=str)}") + + logger.info( + f"Assessment skipped - Response: {json.dumps(response, default=str)}" + ) return response else: - logger.info(f"Assessment needed for section {section_id} - no explainability_info found in extraction results") + logger.info( + f"Assessment needed for section {section_id} - no explainability_info found in extraction results" + ) except Exception as e: - logger.warning(f"Error checking extraction results for assessment skip: {e}") + logger.warning( + f"Error checking extraction results for assessment skip: {e}" + ) # Continue with normal assessment if check fails # Normal assessment processing document.status = Status.ASSESSING # Update document status to ASSESSING for UI only - # Create new 'shell' document since our input document has only 1 section. + # Create new 'shell' document since our input document has only 1 section. 
docStatus = Document( id=document.id, input_key=document.input_key, @@ -215,76 +252,89 @@ def handler(event, context): document_service.update_document(docStatus) # Initialize assessment service with cache table for enhanced retry handling - cache_table = os.environ.get('TRACKING_TABLE') - + cache_table = os.environ.get("TRACKING_TABLE") + # Check if granular assessment is enabled - - if config.assessment.granular.enabled: - # Use enhanced granular assessment service with caching and retry support - from idp_common.assessment.granular_service import GranularAssessmentService - assessment_service = GranularAssessmentService(config=config, cache_table=cache_table) - logger.info("Using granular assessment service with enhanced error handling and caching") - else: - # Use regular assessment service - assessment_service = assessment.AssessmentService(config=config) - logger.info("Using regular assessment service") + + # Use granular assessment service (always enabled) + from idp_common.assessment.granular_service import GranularAssessmentService + + assessment_service = GranularAssessmentService( + config=config, cache_table=cache_table + ) + logger.info("Using granular assessment service") # Process the document section for assessment t0 = time.time() logger.info(f"Starting assessment for section {section_id}") - + try: - updated_document = assessment_service.process_document_section(document, section_id) + updated_document = assessment_service.process_document_section( + document, section_id + ) t1 = time.time() - logger.info(f"Total assessment time: {t1-t0:.2f} seconds") - + logger.info(f"Total assessment time: {t1 - t0:.2f} seconds") + # Check for failed assessment tasks that might require retry (granular assessment) - if hasattr(updated_document, 'metadata') and updated_document.metadata: - failed_tasks = updated_document.metadata.get('failed_assessment_tasks', {}) + if hasattr(updated_document, "metadata") and updated_document.metadata: + failed_tasks = updated_document.metadata.get("failed_assessment_tasks", {}) if failed_tasks: throttling_tasks = { - task_id: task_info for task_id, task_info in failed_tasks.items() - if task_info.get('is_throttling', False) + task_id: task_info + for task_id, task_info in failed_tasks.items() + if task_info.get("is_throttling", False) } - + logger.warning( f"Assessment completed with {len(failed_tasks)} failed tasks, " f"{len(throttling_tasks)} due to throttling" ) - + if throttling_tasks: logger.info( f"Throttling detected in {len(throttling_tasks)} tasks. " f"Successful tasks have been cached for retry." 
) - + # Check for throttling errors in document status and errors field - has_throttling, throttling_error = check_document_for_throttling_errors(updated_document) + has_throttling, throttling_error = check_document_for_throttling_errors( + updated_document + ) if has_throttling: - logger.error(f"Throttling error detected in document errors: {throttling_error}") + logger.error( + f"Throttling error detected in document errors: {throttling_error}" + ) logger.error("Raising ThrottlingException to trigger Step Functions retry") - raise ThrottlingException(f"Throttling detected in document processing: {throttling_error}") - + raise ThrottlingException( + f"Throttling detected in document processing: {throttling_error}" + ) + except Exception as e: t1 = time.time() - logger.error(f"Assessment failed after {t1-t0:.2f} seconds: {str(e)}") - + logger.error(f"Assessment failed after {t1 - t0:.2f} seconds: {str(e)}") + # Check if this is a throttling exception that should trigger retry if is_throttling_exception(e): - logger.error(f"Throttling exception detected: {type(e).__name__}. This will trigger state machine retry.") + logger.error( + f"Throttling exception detected: {type(e).__name__}. This will trigger state machine retry." + ) # Update document status before re-raising document_service.update_document(docStatus) # Re-raise to trigger state machine retry raise else: - logger.error(f"Non-throttling exception: {type(e).__name__}. Marking document as failed.") + logger.error( + f"Non-throttling exception: {type(e).__name__}. Marking document as failed." + ) # Set document status to failed for non-throttling exceptions updated_document = document updated_document.status = Status.FAILED updated_document.errors.append(str(e)) # Assessment validation - validation_enabled = config.assessment.granular.enabled and config.assessment.validation_enabled + validation_enabled = ( + config.assessment.granular.enabled and config.assessment.validation_enabled + ) logger.info(f"Assessment Enabled:{config.assessment.granular.enabled}") logger.info(f"Validation Enabled:{validation_enabled}") if not config.assessment.granular.enabled: @@ -294,33 +344,43 @@ def handler(event, context): else: for section in updated_document.sections: if section.section_id == section_id and section.extraction_result_uri: - logger.info(f"Loading assessment results from: {section.extraction_result_uri}") + logger.info( + f"Loading assessment results from: {section.extraction_result_uri}" + ) # Load extraction data with assessment results extraction_data = s3.get_json_content(section.extraction_result_uri) - validator = AssessmentValidator(extraction_data, - assessment_config=config.assessment, - enable_missing_check=True, - enable_count_check=True) + validator = AssessmentValidator( + extraction_data, + assessment_config=config.assessment, + enable_missing_check=True, + enable_count_check=True, + ) validation_results = validator.validate_all() - if not validation_results['is_valid']: + if not validation_results["is_valid"]: # Handle validation failure updated_document.status = Status.FAILED - validation_errors = validation_results['validation_errors'] + validation_errors = validation_results["validation_errors"] updated_document.errors.extend(validation_errors) logger.error(f"Validation Error: {validation_errors}") # Add Lambda metering for successful assessment execution with dynamic context try: - lambda_metering = calculate_lambda_metering(assessment_context, context, start_time) - updated_document.metering = 
merge_metering_data(updated_document.metering, lambda_metering) + lambda_metering = calculate_lambda_metering( + assessment_context, context, start_time + ) + updated_document.metering = merge_metering_data( + updated_document.metering, lambda_metering + ) except Exception as e: logger.warning(f"Failed to add Lambda metering for assessment: {str(e)}") # Prepare output with automatic compression if needed result = { - 'document': updated_document.serialize_document(working_bucket, f"assessment_{section_id}", logger), - 'section_id': section_id + "document": updated_document.serialize_document( + working_bucket, f"assessment_{section_id}", logger + ), + "section_id": section_id, } - + logger.info("Assessment processing completed") return result diff --git a/patterns/pattern-3/src/assessment_function/index.py b/patterns/pattern-3/src/assessment_function/index.py index 38cbcb4a..ad29b551 100644 --- a/patterns/pattern-3/src/assessment_function/index.py +++ b/patterns/pattern-3/src/assessment_function/index.py @@ -15,7 +15,10 @@ logger = logging.getLogger() logger.setLevel(os.environ.get("LOG_LEVEL", "INFO")) -logging.getLogger('idp_common.bedrock.client').setLevel(os.environ.get("BEDROCK_LOG_LEVEL", "INFO")) +logging.getLogger("idp_common.bedrock.client").setLevel( + os.environ.get("BEDROCK_LOG_LEVEL", "INFO") +) + def handler(event, context): """ @@ -24,27 +27,31 @@ def handler(event, context): using the Assessment service from the idp_common library. """ start_time = time.time() # Capture start time for Lambda metering - logger.info(f"Starting assessment processing for event: {json.dumps(event, default=str)}") + logger.info( + f"Starting assessment processing for event: {json.dumps(event, default=str)}" + ) # Load configuration - config = get_config(as_model = True) + config = get_config(as_model=True) logger.info(f"Config: {json.dumps(config.model_dump(), default=str)}") - + # Extract input from event - handle both compressed and uncompressed - document_data = event.get('document', {}) - section_id = event.get('section_id') - + document_data = event.get("document", {}) + section_id = event.get("section_id") + # Validate inputs if not document_data: raise ValueError("No document provided in event") - + if not section_id: raise ValueError("No section_id provided in event") - + # Convert document data to Document object - handle compression - working_bucket = os.environ.get('WORKING_BUCKET') + working_bucket = os.environ.get("WORKING_BUCKET") document = Document.load_document(document_data, working_bucket, logger) - logger.info(f"Processing assessment for document {document.id}, section {section_id}") + logger.info( + f"Processing assessment for document {document.id}, section {section_id}" + ) # Find the section we're processing section = None @@ -52,7 +59,7 @@ def handler(event, context): if s.section_id == section_id: section = s break - + if not section: raise ValueError(f"Section {section_id} not found in document") @@ -60,19 +67,26 @@ def handler(event, context): granular_config = config.assessment.granular granular_enabled = granular_config.enabled assessment_context = "GranularAssessment" if granular_enabled else "Assessment" - logger.info(f"Assessment mode: {'Granular' if granular_enabled else 'Regular'} (context: {assessment_context})") + logger.info( + f"Assessment mode: {'Granular' if granular_enabled else 'Regular'} (context: {assessment_context})" + ) # Intelligent Assessment Skip: Check if extraction results already contain explainability_info if section.extraction_result_uri and 
section.extraction_result_uri.strip(): try: from idp_common import s3 - logger.info(f"Checking extraction results for existing assessment: {section.extraction_result_uri}") + + logger.info( + f"Checking extraction results for existing assessment: {section.extraction_result_uri}" + ) extraction_data = s3.get_json_content(section.extraction_result_uri) - + # If explainability_info exists, assessment was already done - if extraction_data.get('explainability_info'): - logger.info(f"Skipping assessment for section {section_id} - extraction results already contain explainability_info") - + if extraction_data.get("explainability_info"): + logger.info( + f"Skipping assessment for section {section_id} - extraction results already contain explainability_info" + ) + # Create section-specific document (same as normal processing) to match output format section_document = Document( id=document.id, @@ -91,43 +105,57 @@ def handler(event, context): evaluation_report_uri=document.evaluation_report_uri, evaluation_results_uri=document.evaluation_results_uri, errors=document.errors, - metering={} # Empty metering for skipped processing + metering={}, # Empty metering for skipped processing ) - + # Add only the pages needed for this section for page_id in section.page_ids: if page_id in document.pages: section_document.pages[page_id] = document.pages[page_id] - + # Add only the section being processed (preserve existing data) section_document.sections = [section] - + # Add Lambda metering for assessment skip execution with dynamic context try: - lambda_metering = calculate_lambda_metering(assessment_context, context, start_time) - section_document.metering = merge_metering_data(section_document.metering, lambda_metering) + lambda_metering = calculate_lambda_metering( + assessment_context, context, start_time + ) + section_document.metering = merge_metering_data( + section_document.metering, lambda_metering + ) except Exception as e: - logger.warning(f"Failed to add Lambda metering for assessment skip: {str(e)}") - + logger.warning( + f"Failed to add Lambda metering for assessment skip: {str(e)}" + ) + # Return consistent format for Map state collation response = { - "section_id": section_id, - "document": section_document.serialize_document(working_bucket, f"assessment_skip_{section_id}", logger) + "section_id": section_id, + "document": section_document.serialize_document( + working_bucket, f"assessment_skip_{section_id}", logger + ), } - - logger.info(f"Assessment skipped - Response: {json.dumps(response, default=str)}") + + logger.info( + f"Assessment skipped - Response: {json.dumps(response, default=str)}" + ) return response else: - logger.info(f"Assessment needed for section {section_id} - no explainability_info found in extraction results") + logger.info( + f"Assessment needed for section {section_id} - no explainability_info found in extraction results" + ) except Exception as e: - logger.warning(f"Error checking extraction results for assessment skip: {e}") + logger.warning( + f"Error checking extraction results for assessment skip: {e}" + ) # Continue with normal assessment if check fails # Normal assessment processing document.status = Status.ASSESSING # Update document status to ASSESSING for UI only - # Create new 'shell' document since our input document has only 1 section. + # Create new 'shell' document since our input document has only 1 section. 
docStatus = Document( id=document.id, input_key=document.input_key, @@ -137,34 +165,45 @@ def handler(event, context): logger.info(f"Updating document status to {docStatus.status}") document_service.update_document(docStatus) - # Initialize assessment service - assessment_service = assessment.AssessmentService(config=config) + # Initialize granular assessment service + from idp_common.assessment.granular_service import GranularAssessmentService + + cache_table = os.environ.get("TRACKING_TABLE") + assessment_service = GranularAssessmentService( + config=config, cache_table=cache_table + ) # Process the document section for assessment t0 = time.time() logger.info(f"Starting assessment for section {section_id}") updated_document = assessment_service.process_document_section(document, section_id) t1 = time.time() - logger.info(f"Total extraction time: {t1-t0:.2f} seconds") + logger.info(f"Total extraction time: {t1 - t0:.2f} seconds") # Check if document processing failed if updated_document.status == Status.FAILED: error_message = f"Assessment failed for document {updated_document.id}, section {section_id}" logger.error(error_message) raise Exception(error_message) - + # Add Lambda metering for successful assessment execution with dynamic context try: - lambda_metering = calculate_lambda_metering(assessment_context, context, start_time) - updated_document.metering = merge_metering_data(updated_document.metering, lambda_metering) + lambda_metering = calculate_lambda_metering( + assessment_context, context, start_time + ) + updated_document.metering = merge_metering_data( + updated_document.metering, lambda_metering + ) except Exception as e: logger.warning(f"Failed to add Lambda metering for assessment: {str(e)}") - + # Prepare output with automatic compression if needed result = { - 'document': updated_document.serialize_document(working_bucket, f"assessment_{section_id}", logger), - 'section_id': section_id + "document": updated_document.serialize_document( + working_bucket, f"assessment_{section_id}", logger + ), + "section_id": section_id, } - + logger.info("Assessment processing completed") return result diff --git a/scripts/test_grid_overlay.py b/scripts/test_grid_overlay.py new file mode 100644 index 00000000..1cfeb691 --- /dev/null +++ b/scripts/test_grid_overlay.py @@ -0,0 +1,325 @@ +#!/usr/bin/env python3 +""" +Test script for grid overlay functionality. + +This script demonstrates how to: +1. Add ruler edges to a document image +2. Draw bounding boxes with normalized coordinates +3. 
Combine both features for LLM-assisted spatial localization + +Usage: + python scripts/test_grid_overlay.py [image_path] [output_dir] + +Examples: + # Use sample California license + python scripts/test_grid_overlay.py samples/old_cal_license.png + + # Use a PDF (first page will be converted) + python scripts/test_grid_overlay.py samples/lending_package.pdf + + # Specify output directory + python scripts/test_grid_overlay.py samples/old_cal_license.png /tmp/grid_test +""" + +import sys +import os +from pathlib import Path + +# Add the library to path +sys.path.insert(0, str(Path(__file__).parent.parent / "lib" / "idp_common_pkg")) + +from idp_common.grid_overlay import ( + add_ruler_edges, + draw_bounding_boxes, + add_ruler_and_draw_boxes, +) + + +def convert_pdf_to_image(pdf_path: str) -> bytes: + """Convert first page of PDF to image bytes.""" + try: + import fitz # PyMuPDF + + doc = fitz.open(pdf_path) + page = doc.load_page(0) # First page + + # Render at 150 DPI for good quality + pix = page.get_pixmap(dpi=150) + return pix.tobytes("jpeg") + except ImportError: + print("ERROR: PyMuPDF (fitz) is required for PDF conversion.") + print("Install it with: pip install PyMuPDF") + sys.exit(1) + + +def load_image(image_path: str) -> bytes: + """Load image from file path.""" + ext = Path(image_path).suffix.lower() + + if ext == ".pdf": + print(f"Converting PDF first page to image...") + return convert_pdf_to_image(image_path) + else: + with open(image_path, "rb") as f: + return f.read() + + +def main(): + # Default paths + default_image = "samples/old_cal_license.png" + default_output_dir = "output/grid_overlay_test" + + # Parse arguments + if len(sys.argv) > 1: + image_path = sys.argv[1] + else: + image_path = default_image + + if len(sys.argv) > 2: + output_dir = sys.argv[2] + else: + output_dir = default_output_dir + + # Resolve paths + script_dir = Path(__file__).parent + project_root = script_dir.parent + + if not os.path.isabs(image_path): + image_path = str(project_root / image_path) + + if not os.path.isabs(output_dir): + output_dir = str(project_root / output_dir) + + # Check if input exists + if not os.path.exists(image_path): + print(f"ERROR: Image not found: {image_path}") + print(f"Available samples in {project_root / 'samples'}:") + for f in (project_root / "samples").iterdir(): + if f.is_file(): + print(f" - {f.name}") + sys.exit(1) + + # Create output directory + os.makedirs(output_dir, exist_ok=True) + + print(f"Input image: {image_path}") + print(f"Output directory: {output_dir}") + print() + + # Load the image + print("Loading image...") + image_data = load_image(image_path) + print(f"Image size: {len(image_data)} bytes") + print() + + # ======================================== + # Test 1: Add ruler edges only + # ======================================== + print("Test 1: Adding ruler edges...") + ruler_image = add_ruler_edges( + image_data, + ruler_width=30, + tick_interval=50, + label_interval=100, + ) + + output_path_1 = os.path.join(output_dir, "01_ruler_edges.jpg") + with open(output_path_1, "wb") as f: + f.write(ruler_image) + print(f" Saved: {output_path_1}") + print() + + # ======================================== + # Test 2: Draw bounding boxes on original + # ======================================== + print("Test 2: Drawing bounding boxes on original image...") + + # Example bounding boxes (adjust these based on your image) + # These are in normalized 0-1000 scale + sample_bboxes = [ + { + "bbox": [50, 100, 400, 180], + "label": "Header Area", + "color": "red", + }, 
+ { + "bbox": [100, 300, 600, 380], + "label": "Name Field", + "color": "green", + }, + { + "bbox": [100, 450, 500, 530], + "label": "Address", + "color": "blue", + }, + { + "bbox": [600, 500, 900, 600], + "label": "Photo Area", + "color": "orange", + }, + ] + + bbox_image = draw_bounding_boxes( + image_data, + sample_bboxes, + has_ruler=False, + box_color="red", + box_width=3, + ) + + output_path_2 = os.path.join(output_dir, "02_bounding_boxes.jpg") + with open(output_path_2, "wb") as f: + f.write(bbox_image) + print(f" Saved: {output_path_2}") + print(f" Bounding boxes drawn:") + for bbox in sample_bboxes: + print(f" - {bbox['label']}: {bbox['bbox']}") + print() + + # ======================================== + # Test 3: Ruler + Bounding boxes combined + # ======================================== + print("Test 3: Combining ruler edges and bounding boxes...") + + combined_image = add_ruler_and_draw_boxes( + image_data, + sample_bboxes, + ruler_width=30, + tick_interval=50, + label_interval=100, + box_color="red", + box_width=3, + ) + + output_path_3 = os.path.join(output_dir, "03_ruler_with_boxes.jpg") + with open(output_path_3, "wb") as f: + f.write(combined_image) + print(f" Saved: {output_path_3}") + print() + + # ======================================== + # Test 4: Fine-grained grid (25 unit ticks) + # ======================================== + print("Test 4: Fine-grained ruler (25 unit minor ticks)...") + fine_ruler = add_ruler_edges( + image_data, + ruler_width=35, + tick_interval=25, # Finer ticks + label_interval=100, + font_size=9, + ) + + output_path_4 = os.path.join(output_dir, "04_fine_ruler.jpg") + with open(output_path_4, "wb") as f: + f.write(fine_ruler) + print(f" Saved: {output_path_4}") + print() + + # ======================================== + # Test 5: Different box colors + # ======================================== + print("Test 5: Multi-colored bounding boxes...") + + multi_color_bboxes = [ + {"bbox": [50, 50, 200, 150], "label": "Red", "color": "red"}, + {"bbox": [250, 50, 400, 150], "label": "Green", "color": "green"}, + {"bbox": [450, 50, 600, 150], "label": "Blue", "color": "blue"}, + {"bbox": [50, 200, 200, 300], "label": "Yellow", "color": "yellow"}, + {"bbox": [250, 200, 400, 300], "label": "Orange", "color": "orange"}, + {"bbox": [450, 200, 600, 300], "label": "Purple", "color": "purple"}, + ] + + multi_color_image = draw_bounding_boxes( + image_data, + multi_color_bboxes, + has_ruler=False, + ) + + output_path_5 = os.path.join(output_dir, "05_multi_color_boxes.jpg") + with open(output_path_5, "wb") as f: + f.write(multi_color_image) + print(f" Saved: {output_path_5}") + print() + + # ======================================== + # Summary + # ======================================== + print("=" * 60) + print("Grid Overlay Test Complete!") + print("=" * 60) + print() + print("Generated files:") + for i, path in enumerate( + [output_path_1, output_path_2, output_path_3, output_path_4, output_path_5], 1 + ): + print(f" {i}. {path}") + print() + print("Next steps:") + print(" 1. Open the generated images to see the grid overlays") + print(" 2. Note how the ruler edges provide coordinate references") + print(" 3. 
Observe how bounding boxes are labeled with their coordinates") + print() + print("To use in assessment:") + print(" - Add ruler edges to document images before sending to LLM") + print(" - Update prompt to instruct LLM to read coordinates from ruler") + print(" - LLM can now provide precise [x1, y1, x2, y2] coordinates") + print() + + # Interactive demo: Let user test their own coordinates + print("=" * 60) + print("Interactive Bounding Box Test") + print("=" * 60) + print("You can test drawing custom bounding boxes.") + print("Enter coordinates in format: x1,y1,x2,y2 (0-1000 scale)") + print("Example: 100,200,400,250") + print("Type 'quit' to exit") + print() + + custom_bboxes = [] + while True: + try: + user_input = input("Enter bbox coordinates (or 'quit'): ").strip() + if user_input.lower() == "quit": + break + + coords = [int(x.strip()) for x in user_input.split(",")] + if len(coords) != 4: + print(" Invalid format. Use: x1,y1,x2,y2") + continue + + label = input(" Label for this box (press Enter for default): ").strip() + if not label: + label = f"Box {len(custom_bboxes) + 1}" + + custom_bboxes.append( + { + "bbox": coords, + "label": label, + "color": ["red", "green", "blue", "orange", "purple"][ + len(custom_bboxes) % 5 + ], + } + ) + + print(f" Added: {label} at {coords}") + + except KeyboardInterrupt: + print("\nExiting...") + break + except Exception as e: + print(f" Error: {e}") + + if custom_bboxes: + print(f"\nDrawing {len(custom_bboxes)} custom bounding boxes...") + custom_image = add_ruler_and_draw_boxes(image_data, custom_bboxes) + + output_path_custom = os.path.join(output_dir, "06_custom_boxes.jpg") + with open(output_path_custom, "wb") as f: + f.write(custom_image) + print(f"Saved: {output_path_custom}") + + print("\nDone!") + + +if __name__ == "__main__": + main() From 1371f29163e732bc2fb59b3e580c12888149ce95 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Tue, 18 Nov 2025 17:33:01 +0200 Subject: [PATCH 06/30] typed metadata model --- .../idp_common/assessment/granular_service.py | 55 ++++++++----------- 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index ccddbe67..aa9c1481 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -30,8 +30,10 @@ X_AWS_IDP_CONFIDENCE_THRESHOLD, X_AWS_IDP_DOCUMENT_TYPE, ) +from idp_common.extraction.models import ExtractionData, ExtractionMetadata from idp_common.models import Document, Status -from idp_common.utils import check_token_limit, grid_overlay +from idp_common.utils import check_token_limit +from idp_common.utils.grid_overlay import add_ruler_edges logger = logging.getLogger(__name__) @@ -869,16 +871,10 @@ def process_document_section(self, document: Document, section_id: str) -> Docum Document: Updated Document object with assessment results appended to extraction results """ # Check if assessment is enabled in typed configuration - enabled = self.config.assessment.enabled - if not enabled: + if not self.config.assessment.enabled: logger.info("Assessment is disabled via configuration") return document - # Validate input document - if not document: - logger.error("No document provided") - return document - if not document.sections: logger.error("Document has no sections to process") document.errors.append("Document has no sections to process") @@ -931,8 +927,9 @@ def 
process_document_section(self, document: Document, section_id: str) -> Docum try: # Read existing extraction results t0 = time.time() - extraction_data = s3.get_json_content(section.extraction_result_uri) - extraction_results = extraction_data.get("inference_result", {}) + extraction_data_dict = s3.get_json_content(section.extraction_result_uri) + extraction_data = ExtractionData.model_validate(extraction_data_dict) + extraction_results = extraction_data.inference_result # Skip assessment if no extraction results found if not extraction_results: @@ -997,12 +994,6 @@ def process_document_section(self, document: Document, section_id: str) -> Docum t4 = time.time() logger.info(f"Time taken to read raw OCR results: {t4 - t3:.2f} seconds") - # Get assessment configuration (type-safe, Pydantic handles conversions) - model_id = self.config.assessment.model - temperature = self.config.assessment.temperature - max_tokens = self.config.assessment.max_tokens - system_prompt = self.config.assessment.system_prompt - # Get schema for this document class class_schema = self._get_class_schema(class_label) if not class_schema: @@ -1053,7 +1044,7 @@ def process_document_section(self, document: Document, section_id: str) -> Docum # Apply grid overlay to page images for assessment grid_page_images = [] for page_img in page_images: - grid_img = grid_overlay.add_grid_overlay(page_img) + grid_img = add_ruler_edges(page_img) grid_page_images.append(grid_img) # Execute tasks using Strands-based parallel executor @@ -1070,10 +1061,10 @@ def process_document_section(self, document: Document, section_id: str) -> Docum extraction_results=extraction_results, page_images=grid_page_images, sorted_page_ids=sorted_page_ids, - model_id=model_id, - system_prompt=system_prompt, - temperature=temperature, - max_tokens=max_tokens, + model_id=self.config.assessment.model, + system_prompt=self.config.assessment.system_prompt, + temperature=self.config.assessment.temperature, + max_tokens=self.config.assessment.max_tokens, max_concurrent=self.max_workers, ) ) @@ -1232,21 +1223,21 @@ def process_document_section(self, document: Document, section_id: str) -> Docum f"Document will be marked as failed without retry." 
) - # Update the existing extraction result with enhanced assessment data - extraction_data["explainability_info"] = [enhanced_assessment_data] - extraction_data["metadata"] = extraction_data.get("metadata", {}) - extraction_data["metadata"]["assessment_time_seconds"] = total_duration - extraction_data["metadata"]["granular_assessment_used"] = True - extraction_data["metadata"]["assessment_tasks_total"] = len(tasks) - extraction_data["metadata"]["assessment_tasks_successful"] = len( - successful_tasks - ) - extraction_data["metadata"]["assessment_tasks_failed"] = len(failed_tasks) + # Update the existing extraction result with enhanced assessment data (typed) + extraction_data.explainability_info = [enhanced_assessment_data] + extraction_data.metadata.assessment_time_seconds = total_duration + extraction_data.metadata.granular_assessment_used = True + extraction_data.metadata.assessment_tasks_total = len(tasks) + extraction_data.metadata.assessment_tasks_successful = len(successful_tasks) + extraction_data.metadata.assessment_tasks_failed = len(failed_tasks) # Write the updated result back to S3 bucket, key = utils.parse_s3_uri(section.extraction_result_uri) s3.write_content( - extraction_data, bucket, key, content_type="application/json" + extraction_data.model_dump(mode="json"), + bucket, + key, + content_type="application/json", ) # Update the section in the document with confidence threshold alerts From fe56e98d071e437f07715cb3c00b19c8ffe32879 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 19 Nov 2025 11:58:14 +0200 Subject: [PATCH 07/30] fixes --- .../idp_common/assessment/granular_service.py | 180 +++++-------- .../idp_common/assessment/models.py | 78 ++++-- .../idp_common/assessment/strands_executor.py | 14 +- .../idp_common/assessment/strands_service.py | 159 +++++++----- .../idp_common/assessment/strands_tools.py | 30 ++- .../idp_common/config/models.py | 4 +- .../idp_common/image/__init__.py | 44 +++- .../idp_common/utils/grid_overlay.py | 6 +- .../idp_common/utils/pdf_helpers.py | 242 ++++++++++++++++++ lib/idp_common_pkg/uv.lock | 29 ++- lib/idp_common_pkg/verify_stickler.py | 6 +- 11 files changed, 545 insertions(+), 247 deletions(-) create mode 100644 lib/idp_common_pkg/idp_common/utils/pdf_helpers.py diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index aa9c1481..83d6455e 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -12,13 +12,14 @@ """ import json -import logging import os import time -from dataclasses import dataclass from typing import Any, Dict, List, Optional, Tuple, Union +from aws_lambda_powertools import Logger + from idp_common import image, metrics, s3, utils +from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_executor import execute_assessment_tasks_parallel from idp_common.config.models import IDPConfig from idp_common.config.schema_constants import ( @@ -35,44 +36,7 @@ from idp_common.utils import check_token_limit from idp_common.utils.grid_overlay import add_ruler_edges -logger = logging.getLogger(__name__) - - -@dataclass -class AssessmentTask: - """Single-field assessment task for Strands executor.""" - - task_id: str - task_type: str # Always "attribute" - single field assessment - - # Path to field as tuple: ("address", "street") or ("items", 0, "price") - field_path: Tuple[Union[str, int], ...] 
- - The field name being assessed (last element of path) - field_name: str - - # Schema for this specific field only - field_schema: Dict[str, Any] - - # Confidence threshold for this field - confidence_threshold: float - - # Direct reference to parent container in assessment structure (for O(1) insertion) - # Can be Dict for regular fields or List for array items - parent_assessment_dict: Union[Dict[str, Any], List[Any]] - - -@dataclass -class AssessmentResult: - """Result of a single assessment task.""" - - task_id: str - success: bool - assessment_data: Dict[str, Any] - confidence_alerts: List[Dict[str, Any]] - error_message: Optional[str] = None - processing_time: float = 0.0 - metering: Optional[Dict[str, Any]] = None +logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) def _safe_float_conversion(value: Any, default: float = 0.0) -> float: @@ -114,49 +78,13 @@ def _safe_float_conversion(value: Any, default: float = 0.0) -> float: return default -def _get_value_at_path(data: Dict[str, Any], path: Tuple[Union[str, int], ...]) -> Any: - """ - Navigate nested data structure using tuple path. - - Args: - data: Nested dictionary/list structure - path: Tuple of keys/indices like ("address", "street") or ("items", 0, "price") - - Returns: - Value at the specified path, or None if path doesn't exist - - Examples: - >>> data = {"address": {"street": "123 Main St"}} - >>> _get_value_at_path(data, ("address", "street")) - "123 Main St" - - >>> data = {"items": [{"price": 10.99}, {"price": 20.99}]} - >>> _get_value_at_path(data, ("items", 0, "price")) - 10.99 - """ - current = data - for key in path: - if current is None: - return None - if isinstance(current, dict): - current = current.get(key) - elif isinstance(current, list): - if isinstance(key, int) and 0 <= key < len(current): - current = current[key] - else: - return None - else: - return None - return current - - class GranularAssessmentService: """Enhanced assessment service with granular, cached, and parallel processing.""" def __init__( self, region: str | None = None, - config: Dict[str, Any] | IDPConfig | None = None, + config: dict[str, Any] | IDPConfig | None = None, cache_table: str | None = None, ): """ @@ -194,8 +122,10 @@ def __init__( if self.cache_table_name: import boto3 - dynamodb = boto3.resource("dynamodb", region_name=self.region) - self.cache_table = dynamodb.Table(self.cache_table_name) # type: ignore[attr-defined] + dynamodb: DynamoDBServiceResource = boto3.resource( + "dynamodb", region_name=self.region + ) # pyright: ignore[reportAssignmentType] + self.cache_table = dynamodb.Table(self.cache_table_name) logger.info( f"Granular assessment caching enabled using table: {self.cache_table_name}" ) @@ -220,7 +150,7 @@ def __init__( f"caching={'enabled' if self.cache_table else 'disabled'}" ) - def _get_class_schema(self, class_label: str) -> Dict[str, Any]: + def _get_class_schema(self, class_label: str) -> dict[str, Any]: """ Get JSON Schema for a specific document class. @@ -238,7 +168,7 @@ def __init__( return {} def _get_confidence_threshold_by_path( - self, properties: Dict[str, Any], path: str, default: float = 0.9 + self, properties: dict[str, Any], path: str, default: float = 0.9 ) -> float: """ Get confidence threshold for a property path (e.g., 'CompanyAddress.Street'). 
@@ -283,10 +213,10 @@ def _get_confidence_threshold_by_path( def _create_assessment_tasks( self, - extraction_results: Dict[str, Any], - properties: Dict[str, Any], + extraction_results: dict[str, Any], + properties: dict[str, Any], default_confidence_threshold: float, - ) -> Tuple[List[AssessmentTask], Dict[str, Any]]: + ) -> tuple[list[AssessmentTask], dict[str, Any]]: """ Create assessment tasks and pre-build assessment structure. @@ -302,18 +232,18 @@ def _create_assessment_tasks( Returns: Tuple of (tasks, assessment_structure) - - tasks: List of AssessmentTask objects - - assessment_structure: Dict mirroring extraction_results shape + - tasks: list of AssessmentTask objects + - assessment_structure: dict mirroring extraction_results shape """ - tasks: List[AssessmentTask] = [] - assessment_structure: Dict[str, Any] = {} + tasks: list[AssessmentTask] = [] + assessment_structure: dict[str, Any] = {} task_counter = [0] # Use list for mutable counter in nested function def _traverse( - schema_props: Dict[str, Any], - extraction_data: Dict[str, Any], - current_path: Tuple[Union[str, int], ...], - parent_dict: Dict[str, Any], + schema_props: dict[str, Any], + extraction_data: dict[str, Any], + current_path: tuple[str | int, ...], + parent_dict: dict[str, Any], ) -> None: """ Recursively traverse schema and extraction data to build tasks and structure. @@ -334,7 +264,7 @@ def _traverse( if prop_type == TYPE_OBJECT and isinstance(prop_value, dict): # Create nested dict in assessment structure - nested_dict: Dict[str, Any] = {} + nested_dict: dict[str, Any] = {} parent_dict[prop_name] = nested_dict # Recurse into nested object @@ -343,7 +273,7 @@ def _traverse( elif prop_type == TYPE_ARRAY and isinstance(prop_value, list): # Create list in assessment structure - assessment_list: List[Any] = [] + assessment_list: list[Any] = [] parent_dict[prop_name] = assessment_list # Process each array item @@ -355,7 +285,7 @@ def _traverse( if item_type == TYPE_OBJECT and isinstance(item_value, dict): # Create dict for this array item - item_dict: Dict[str, Any] = {} + item_dict: dict[str, Any] = {} assessment_list.append(item_dict) # Recurse into array item properties @@ -438,7 +368,7 @@ def _get_cache_key( def _get_cached_assessment_tasks( self, document_id: str, workflow_execution_arn: str, section_id: str - ) -> Dict[str, AssessmentResult]: + ) -> dict[str, AssessmentResult]: """ Retrieve cached assessment task results for a document section. @@ -448,7 +378,7 @@ def _get_cached_assessment_tasks( section_id: Section ID Returns: - Dictionary mapping task_id to cached AssessmentResult, empty dict if no cache + dictionary mapping task_id to cached AssessmentResult, empty dict if no cache """ logger.info( f"Attempting to retrieve cached assessment tasks for document {document_id} section {section_id}" @@ -476,8 +406,6 @@ def _get_cached_assessment_tasks( # Extract task results from JSON attribute if "task_results" in cached_data: try: - import json - task_data_list = json.loads(cached_data["task_results"]) for task_data in task_data_list: @@ -515,7 +443,7 @@ def _cache_successful_assessment_tasks( document_id: str, workflow_execution_arn: str, section_id: str, - task_results: List[AssessmentResult], + task_results: list[AssessmentResult], ) -> None: """ Cache successful assessment task results to DynamoDB as a JSON-serialized list. 
@@ -524,7 +452,7 @@ def _cache_successful_assessment_tasks( document_id: Document ID workflow_execution_arn: Workflow execution ARN section_id: Section ID - task_results: List of successful assessment task results + task_results: list of successful assessment task results """ if not self.cache_table or not task_results: return @@ -608,16 +536,16 @@ def _is_throttling_exception(self, exception: Exception) -> bool: def _aggregate_assessment_results( self, - tasks: List[AssessmentTask], - results: List[AssessmentResult], - assessment_structure: Dict[str, Any], - ) -> Tuple[Dict[str, Any], List[Dict[str, Any]], Dict[str, Any]]: + tasks: list[AssessmentTask], + results: list[AssessmentResult], + assessment_structure: dict[str, Any], + ) -> tuple[dict[str, Any], list[dict[str, Any]], dict[str, Any]]: """ Aggregate individual task results into assessment structure using direct parent insertion. Args: - tasks: List of assessment tasks - results: List of assessment results + tasks: list of assessment tasks + results: list of assessment results assessment_structure: Pre-built assessment structure from _create_assessment_tasks Returns: @@ -717,17 +645,17 @@ def _get_text_confidence_data(self, page) -> str: return "Text Confidence Data Unavailable" def _convert_bbox_to_geometry( - self, bbox_coords: List[float], page_num: int - ) -> Dict[str, Any]: + self, bbox_coords: list[float], page_num: int + ) -> dict[str, Any]: """ Convert [x1,y1,x2,y2] coordinates to geometry format. Args: - bbox_coords: List of 4 coordinates [x1, y1, x2, y2] in 0-1000 scale + bbox_coords: list of 4 coordinates [x1, y1, x2, y2] in 0-1000 scale page_num: Page number where the bounding box appears Returns: - Dictionary in geometry format compatible with pattern-1 UI + dictionary in geometry format compatible with pattern-1 UI """ if len(bbox_coords) != 4: raise ValueError(f"Expected 4 coordinates, got {len(bbox_coords)}") @@ -750,8 +678,8 @@ def _convert_bbox_to_geometry( } def _process_single_assessment_geometry( - self, attr_assessment: Dict[str, Any], attr_name: str = "" - ) -> Dict[str, Any]: + self, attr_assessment: dict[str, Any], attr_name: str = "" + ) -> dict[str, Any]: """ Process geometry data for a single assessment (with confidence key). @@ -808,8 +736,8 @@ def _process_single_assessment_geometry( return enhanced_attr def _extract_geometry_from_assessment( - self, assessment_data: Dict[str, Any] - ) -> Dict[str, Any]: + self, assessment_data: dict[str, Any] + ) -> dict[str, Any]: """ Extract geometry data from assessment response and convert to proper format. Now supports recursive processing of nested group attributes. 
@@ -960,6 +888,9 @@ def process_document_section(self, document: Document, section_id: str) -> Docum # Read page images with configurable dimensions (type-safe access) target_width = self.config.assessment.image.target_width target_height = self.config.assessment.image.target_height + logger.info( + f"Image resize config: target_width={target_width}, target_height={target_height}" + ) page_images = [] for page_id in sorted_page_ids: @@ -968,9 +899,12 @@ def process_document_section(self, document: Document, section_id: str) -> Docum page = document.pages[page_id] image_uri = page.image_uri - # Just pass the values directly - prepare_image handles empty strings/None + # For assessment, convert to PNG for better compression with rulers/overlays image_content = image.prepare_image( - image_uri, target_width, target_height + image_uri, target_width, target_height, output_format="PNG" + ) + logger.info( + f"Loaded page {page_id} image as PNG: {len(image_content):,} bytes" ) page_images.append(image_content) @@ -1043,8 +977,11 @@ def process_document_section(self, document: Document, section_id: str) -> Docum # Apply grid overlay to page images for assessment grid_page_images = [] - for page_img in page_images: + for idx, page_img in enumerate(page_images): grid_img = add_ruler_edges(page_img) + logger.info( + f"Added ruler overlay to page {idx}: {len(page_img):,} bytes -> {len(grid_img):,} bytes" + ) grid_page_images.append(grid_img) # Execute tasks using Strands-based parallel executor @@ -1065,6 +1002,7 @@ def process_document_section(self, document: Document, section_id: str) -> Docum system_prompt=self.config.assessment.system_prompt, temperature=self.config.assessment.temperature, max_tokens=self.config.assessment.max_tokens, + document_schema=class_schema, max_concurrent=self.max_workers, ) ) @@ -1354,10 +1292,10 @@ def assess_document(self, document: Document) -> Document: def _handle_parsing_errors( self, document: Document, - failed_tasks: List[str], + failed_tasks: list[str], document_text: str, - extraction_results: Dict, - ) -> Optional[str]: + extraction_results: dict, + ) -> str | None: """Handle multiple parsing errors with user-friendly messaging.""" # Check for token limit issues token_warning = check_token_limit( diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index 64f480db..f76361c3 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -8,12 +8,12 @@ the confidence and accuracy of extraction results. 
""" -from dataclasses import dataclass -from typing import Any, Dict, List, Optional +from typing import Any +from pydantic import BaseModel, Field -@dataclass -class AttributeAssessment: + +class AttributeAssessment(BaseModel): """Assessment result for a single extracted attribute""" attribute_name: str @@ -22,30 +22,74 @@ class AttributeAssessment: extracted_value: Any = None -@dataclass -class AssessmentResult: - """Result of assessment for a document section""" +class LegacyAssessmentResult(BaseModel): + """Legacy result of assessment for a document section (for backwards compatibility)""" section_id: str document_class: str - attribute_assessments: List[AttributeAssessment] + attribute_assessments: list[AttributeAssessment] overall_confidence: float = 0.0 - raw_response: Optional[str] = None - metering: Optional[Dict[str, Any]] = None - metadata: Optional[Dict[str, Any]] = None - output_uri: Optional[str] = None + raw_response: str | None = None + metering: dict[str, Any] | None = None + metadata: dict[str, Any] | None = None + output_uri: str | None = None -@dataclass -class DocumentAssessmentResult: +class DocumentAssessmentResult(BaseModel): """Assessment result for an entire document""" document_id: str - section_assessments: List[AssessmentResult] + section_assessments: list[LegacyAssessmentResult] overall_document_confidence: float = 0.0 total_attributes_assessed: int = 0 high_confidence_attributes: int = 0 medium_confidence_attributes: int = 0 low_confidence_attributes: int = 0 - assessment_summary: Optional[str] = None - metadata: Optional[Dict[str, Any]] = None + assessment_summary: str | None = None + metadata: dict[str, Any] | None = None + + +# ============================================================================ +# Assessment Task Model (unified for all assessment services) +# ============================================================================ + + +class AssessmentTask(BaseModel): + """ + Single-field assessment task for granular assessment. + + Used by both granular_service.py (creation) and strands_service.py (execution). + """ + + model_config = {"arbitrary_types_allowed": True} + + task_id: str + task_type: str = Field(description="Always 'attribute' - single field assessment") + + # Path to field as tuple: ("address", "street") or ("items", 0, "price") + field_path: tuple[str | int, ...] 
+ + # The field name being assessed (last element of path) + field_name: str + + # Schema for this specific field only + field_schema: dict[str, Any] + + # Confidence threshold for this field + confidence_threshold: float + + # Direct reference to parent container in assessment structure (for O(1) insertion) + # Can be Dict for regular fields or list for array items + parent_assessment_dict: dict[str, Any] | list[Any] + + +class AssessmentResult(BaseModel): + """Result of a single assessment task (used by both granular and strands services).""" + + task_id: str + success: bool + assessment_data: dict[str, Any] + confidence_alerts: list[dict[str, Any]] + error_message: str | None = None + processing_time: float = 0.0 + metering: dict[str, Any] | None = None diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py index ba91fc5b..7b0d3015 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py @@ -12,11 +12,8 @@ from aws_lambda_powertools import Logger -from idp_common.assessment.strands_service import ( - AssessmentResult, - AssessmentTask, - assess_attribute_with_strands, -) +from idp_common.assessment.models import AssessmentResult, AssessmentTask +from idp_common.assessment.strands_service import assess_attribute_with_strands from idp_common.utils import merge_metering_data logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) @@ -31,6 +28,7 @@ async def execute_tasks_async( system_prompt: str, temperature: float, max_tokens: int, + document_schema: dict[str, Any], max_concurrent: int = 5, max_retries: int = 7, connect_timeout: float = 10.0, @@ -49,6 +47,7 @@ async def execute_tasks_async( system_prompt: System prompt temperature: Model temperature max_tokens: Max tokens + document_schema: Full document JSON schema max_concurrent: Maximum concurrent tasks (default 5) max_retries: Maximum retry attempts connect_timeout: Connection timeout in seconds @@ -80,6 +79,7 @@ async def execute_with_semaphore(task: AssessmentTask) -> AssessmentResult: system_prompt=system_prompt, temperature=temperature, max_tokens=max_tokens, + document_schema=document_schema, max_retries=max_retries, connect_timeout=connect_timeout, read_timeout=read_timeout, @@ -152,6 +152,7 @@ def execute_assessment_tasks_parallel( system_prompt: str, temperature: float, max_tokens: int, + document_schema: dict[str, Any], max_concurrent: int = 5, max_retries: int = 7, connect_timeout: float = 10.0, @@ -173,6 +174,7 @@ def execute_assessment_tasks_parallel( system_prompt: System prompt temperature: Temperature max_tokens: Max tokens + document_schema: Full document JSON schema max_concurrent: Max concurrent tasks (default 5) max_retries: Maximum retry attempts connect_timeout: Connection timeout in seconds @@ -201,6 +203,7 @@ def execute_assessment_tasks_parallel( system_prompt=system_prompt, temperature=temperature, max_tokens=max_tokens, + document_schema=document_schema, max_concurrent=max_concurrent, max_retries=max_retries, connect_timeout=connect_timeout, @@ -225,6 +228,7 @@ def execute_assessment_tasks_parallel( system_prompt=system_prompt, temperature=temperature, max_tokens=max_tokens, + document_schema=document_schema, max_concurrent=max_concurrent, max_retries=max_retries, connect_timeout=connect_timeout, diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py 
b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index cb665e98..b3df37ca 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -12,42 +12,18 @@ from aws_lambda_powertools import Logger from botocore.config import Config -from pydantic import BaseModel from strands import Agent, tool from strands.agent.conversation_manager import SummarizingConversationManager from strands.models.bedrock import BedrockModel from strands.types.content import ContentBlock, Message +from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_models import AssessmentOutput from idp_common.assessment.strands_tools import create_strands_tools logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) -# Pydantic versions of AssessmentTask/Result for Strands compatibility -# Note: granular_service has dataclass versions - these are separate for Strands -class AssessmentTask(BaseModel): - """Assessment task definition.""" - - task_id: str - task_type: str - attributes: list[str] - task_schema: dict[str, Any] - confidence_thresholds: dict[str, float] - - -class AssessmentResult(BaseModel): - """Assessment result.""" - - task_id: str - success: bool - assessment_data: dict[str, Any] - confidence_alerts: list[dict[str, Any]] - error_message: str | None = None - processing_time: float = 0.0 - metering: dict[str, Any] | None = None - - def create_submit_assessment_tool(): """ Create a tool for submitting assessment results. @@ -99,6 +75,7 @@ async def assess_attribute_with_strands( system_prompt: str, temperature: float, max_tokens: int, + document_schema: dict[str, Any], max_retries: int = 7, connect_timeout: float = 10.0, read_timeout: float = 300.0, @@ -131,10 +108,15 @@ async def assess_attribute_with_strands( submit_tool = create_submit_assessment_tool() tools = base_tools + [submit_tool] - # 2. Build task-specific prompt - task_prompt = _build_task_prompt(task, extraction_results, len(page_images)) + # 2. Build enhanced system prompt with schema and extraction (for caching) + enhanced_system_prompt = _build_system_prompt_with_context( + system_prompt, document_schema, extraction_results, len(page_images) + ) - # 3. Create Bedrock model config (following agentic_idp.py pattern) + # 3. Build minimal task-specific prompt (just field path and threshold) + task_prompt = _build_task_prompt(task) + + # 4. Create Bedrock model config (following agentic_idp.py pattern) boto_config = Config( retries={ "max_attempts": max_retries, @@ -151,18 +133,18 @@ async def assess_attribute_with_strands( "boto_client_config": boto_config, } - # 4. Initialize Strands agent + # 5. 
Initialize Strands agent agent = Agent( model=BedrockModel(**model_config), tools=tools, - system_prompt=system_prompt, + system_prompt=enhanced_system_prompt, state={ "task": task.model_dump(), "extraction_results": extraction_results, "assessment_output": None, }, conversation_manager=SummarizingConversationManager( - summary_ratio=0.8, preserve_recent_messages=2 + summary_ratio=0.8, preserve_recent_messages=1 ), ) @@ -175,7 +157,7 @@ async def assess_attribute_with_strands( extra={ "task_id": task.task_id, "task_type": task.task_type, - "attributes": task.attributes, + "field_name": task.field_name, }, ) @@ -193,12 +175,29 @@ async def assess_attribute_with_strands( # Validate to Pydantic model assessment_output = AssessmentOutput(**assessment_dict) - # Validate that agent assessed exactly the expected field - expected_field = task.attributes[0] # Task assesses exactly one field - if assessment_output.field_name != expected_field: + # Validate that agent assessed the expected field + # The agent may return: + # - Just the field name: "Street" + # - Full path with dots: "VendorAddress.Street" + # - Full path with array indices: "Items[0].Description" + # We accept any of these as long as the expected field_name appears + expected_field = task.field_name + assessed_field = assessment_output.field_name + + # Check if fields match: + # 1. Exact match + # 2. Expected field is at the end after a dot: "VendorAddress.Street" ends with ".Street" + # 3. Expected field is at the end after bracket: "Items[0].Description" ends with ".Description" + if not ( + assessed_field == expected_field + or assessed_field.endswith(f".{expected_field}") + or assessed_field.endswith(f"]{expected_field}") + or f".{expected_field}" in assessed_field + or f"]{expected_field}" in assessed_field + ): raise ValueError( f"Agent assessed wrong field: expected '{expected_field}', " - f"got '{assessment_output.field_name}'" + f"got '{assessed_field}'" ) # 8. Extract metering from response @@ -259,51 +258,44 @@ async def assess_attribute_with_strands( ) -def _build_task_prompt( - task: AssessmentTask, +def _build_system_prompt_with_context( + base_system_prompt: str, + document_schema: dict[str, Any], extraction_results: dict[str, Any], num_images: int, ) -> str: """ - Build prompt for assessing a single field. + Build system prompt with full schema and extraction results for prompt caching. - Includes: - - Clear field path (e.g., "address.street" or "items[2].price") - - Full extraction results for context - - Schema and threshold for the specific field - - Instructions for using images and tools + This puts the static/cacheable content (schema, extraction, general instructions) + in the system prompt, which benefits from prompt caching. Args: - task: Assessment task for one specific field - extraction_results: Complete extraction results (arbitrarily nested) + base_system_prompt: Base assessment system prompt + document_schema: Full JSON schema for the document class + extraction_results: Complete extraction results num_images: Number of available page images Returns: - Formatted prompt string + Enhanced system prompt with schema and extraction context """ - # Get the single field being assessed - field_path = task.attributes[ - 0 - ] # e.g., "name" or "address.street" or "items[0].price" - threshold = list(task.confidence_thresholds.values())[0] + return f"""{base_system_prompt} - prompt = f"""# Confidence Assessment Task +## Document Schema -You are assessing the confidence of a SINGLE extracted field from a document. 
+Below is the full JSON schema for this document type. This defines all fields, their types, and confidence thresholds. -## Field to Assess -**Field Path**: `{field_path}` -**Confidence Threshold**: {threshold} +```json +{json.dumps(document_schema, indent=2)} +``` ## Complete Extraction Results -(Full document context - locate the value for `{field_path}`) -{json.dumps(extraction_results, indent=2)} -## Field Schema -{json.dumps(task.task_schema, indent=2)} +Below are the complete extraction results for the document being assessed. When assessing a specific field, locate its value in this structure. -## Your Task -Assess ONLY the field `{field_path}`. Do not assess any other fields. +```json +{json.dumps(extraction_results, indent=2)} +``` ## Available Document Images @@ -335,14 +327,45 @@ def _build_task_prompt( Example: {{"x1": 150, "y1": 220, "x2": 380, "y2": 245, "page": 1}} -## Output Schema +**Important**: You MUST call `submit_assessment` to complete each task. +""" + + +def _build_task_prompt(task: AssessmentTask) -> str: + """ + Build minimal task-specific prompt for assessing a single field. + + This is minimal (just field path and threshold) to maximize the benefit + of caching the system prompt which contains the schema and extraction. + + Args: + task: Assessment task for one specific field + + Returns: + Minimal task prompt string + """ + # Convert field_path tuple to string representation + # e.g., ("address", "street") -> "address.street" + # e.g., ("items", 0, "price") -> "items[0].price" + path_parts = [] + for part in task.field_path: + if isinstance(part, int): + path_parts[-1] = f"{path_parts[-1]}[{part}]" + else: + path_parts.append(str(part)) + field_path_str = ".".join(path_parts) + + return f"""# Assessment Task + +Assess the confidence of this field: + +**Field Path**: `{field_path_str}` +**Confidence Threshold**: {task.confidence_threshold} -Your assessment must match the {task.task_type} schema. -Use the `submit_assessment` tool when ready with a complete assessment dict. +Locate the value for `{field_path_str}` in the extraction results provided in the system context, verify it against the document images, and submit your assessment. -**Important**: You MUST call `submit_assessment` to complete this task. +You MUST assess ONLY this field - do not assess any other fields. """ - return prompt def _convert_to_assessment_result( diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index 6ab9fd33..a7b18a6f 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -11,6 +11,7 @@ from aws_lambda_powertools import Logger from pydantic import BaseModel, Field from strands import Agent, tool +from strands.types.content import ContentBlock, ImageContent from idp_common.assessment.strands_models import BoundingBox from idp_common.utils.grid_overlay import draw_bounding_boxes @@ -48,7 +49,7 @@ def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) """ @tool - def view_image(input_data: dict[str, Any], agent: Agent) -> str: + def view_image(input_data: dict[str, Any], agent: Agent) -> ImageContent: """ View a specific page image, optionally highlighting a bounding box area. 
@@ -62,7 +63,7 @@ def view_image(input_data: dict[str, Any], agent: Agent) -> str: - label (str, optional): Label for the bounding box Returns: - Success message with image details + ImageContent object that the LLM can see Example: view_image({ @@ -116,26 +117,29 @@ def view_image(input_data: dict[str, Any], agent: Agent) -> str: }, ) - # Store the image in agent state using standardized key - image_key = f"page_{view_input.image_index}_{page_id}" - agent.state.set(image_key, img_bytes) - logger.info( - "Stored image in agent state", + "Returning image to agent", extra={ "image_index": view_input.image_index, "page_id": page_id, "has_bbox": view_input.bounding_box is not None, + "image_size_bytes": len(img_bytes), }, ) - bbox_info = "" - if view_input.bounding_box: - bbox_info = f"\nHighlighted region: [{view_input.bounding_box.x1}, {view_input.bounding_box.y1}, {view_input.bounding_box.x2}, {view_input.bounding_box.y2}] on page {view_input.bounding_box.page}" - if view_input.label: - bbox_info += f'\nLabel: "{view_input.label}"' + # Return the image as ImageContent so the LLM can actually see it + import base64 + + image_b64 = base64.b64encode(img_bytes).decode("utf-8") - return f"Showing page {view_input.image_index} (Page ID: {page_id}){bbox_info}\nUse the coordinate grid (0-1000 scale) to specify bounding boxes." + return ImageContent( + type="image", + source={ + "type": "base64", + "media_type": "image/png", + "data": image_b64, + }, + ) return view_image diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index e0fa885f..ae558ce4 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -35,10 +35,10 @@ class ImageConfig(BaseModel): """Image processing configuration""" target_width: Optional[int] = Field( - default=None, description="Target width for images" + default=1200, description="Target width for images (default: 1200px for ~1MP)" ) target_height: Optional[int] = Field( - default=None, description="Target height for images" + default=1200, description="Target height for images (default: 1200px for ~1MP)" ) dpi: Optional[int] = Field(default=None, description="DPI for image rendering") preprocessing: Optional[bool] = Field( diff --git a/lib/idp_common_pkg/idp_common/image/__init__.py b/lib/idp_common_pkg/idp_common/image/__init__.py index 65cbdd33..eb43f726 100644 --- a/lib/idp_common_pkg/idp_common/image/__init__.py +++ b/lib/idp_common_pkg/idp_common/image/__init__.py @@ -3,12 +3,13 @@ from PIL import Image, ImageFilter, ImageChops, ImageOps, ImageDraw, ImageFont import io -import logging +import os from typing import Tuple, Optional, Dict, Any, Union, List +from aws_lambda_powertools import Logger from ..s3 import get_binary_content from ..utils import parse_s3_uri -logger = logging.getLogger(__name__) +logger = Logger(service="image", level=os.getenv("LOG_LEVEL", "INFO")) def resize_image( @@ -16,6 +17,7 @@ def resize_image( target_width: Optional[int] = None, target_height: Optional[int] = None, allow_upscale: bool = False, + output_format: Optional[str] = None, ) -> bytes: """ Resize an image to fit within target dimensions while preserving aspect ratio. @@ -27,9 +29,10 @@ def resize_image( target_width: Target width in pixels (None or empty string = no resize) target_height: Target height in pixels (None or empty string = no resize) allow_upscale: Whether to allow making the image larger than original + output_format: Force output format (e.g. 
'PNG', 'JPEG'). If None, preserves original format. Returns: - Resized image bytes in original format (or JPEG if format cannot be preserved) + Resized image bytes in specified format (or original format if output_format is None) """ # Handle empty strings - convert to None if isinstance(target_width, str) and not target_width.strip(): @@ -57,6 +60,11 @@ def resize_image( current_width, current_height = image.size original_format = image.format # Store original format + logger.info( + f"resize_image: original={current_width}x{current_height} ({original_format}), " + f"target={target_width}x{target_height}, output_format={output_format}" + ) + # Calculate scaling factor to fit within bounds while preserving aspect ratio width_ratio = target_width / current_width height_ratio = target_height / current_height @@ -76,8 +84,14 @@ def resize_image( # Save in original format if possible img_byte_array = io.BytesIO() - # Determine save format - use original if available, otherwise JPEG - if original_format and original_format in [ + # Determine save format + if output_format: + # Use explicitly requested format + save_format = output_format.upper() + logger.info( + f"Converting from {original_format or 'unknown'} to {save_format}" + ) + elif original_format and original_format in [ "JPEG", "PNG", "GIF", @@ -97,6 +111,8 @@ def resize_image( if save_format in ["JPEG", "JPG"]: save_kwargs["quality"] = 95 # High quality save_kwargs["optimize"] = True + elif save_format == "PNG": + save_kwargs["optimize"] = True # Handle format-specific requirements if save_format == "PNG" and image.mode not in ["RGBA", "LA", "L", "P"]: @@ -105,11 +121,17 @@ def resize_image( image = image.convert("RGB") image.save(img_byte_array, **save_kwargs) - return img_byte_array.getvalue() + result_bytes = img_byte_array.getvalue() + logger.info( + f"resize_image: resized to {image.width}x{image.height}, " + f"saved as {save_format}, size={len(result_bytes):,} bytes" + ) + return result_bytes else: # No resizing needed - return original data unchanged logger.info( - f"Image {current_width}x{current_height} already fits within {target_width}x{target_height}, returning original" + f"resize_image: image {current_width}x{current_height} already fits within " + f"{target_width}x{target_height}, returning original ({len(image_data):,} bytes)" ) return image_data @@ -119,6 +141,7 @@ def prepare_image( target_width: Optional[int] = None, target_height: Optional[int] = None, allow_upscale: bool = False, + output_format: Optional[str] = None, ) -> bytes: """ Prepare an image for model input from either S3 URI or raw bytes @@ -128,9 +151,10 @@ def prepare_image( target_width: Target width in pixels (None or empty string = no resize) target_height: Target height in pixels (None or empty string = no resize) allow_upscale: Whether to allow making the image larger than original + output_format: Force output format (e.g. 'PNG', 'JPEG'). If None, preserves original format. 
Returns: - Processed image bytes ready for model input (preserves format when possible) + Processed image bytes ready for model input """ # Get the image data if isinstance(image_source, str) and image_source.startswith("s3://"): @@ -143,7 +167,9 @@ def prepare_image( ) # Resize and process - return resize_image(image_data, target_width, target_height, allow_upscale) + return resize_image( + image_data, target_width, target_height, allow_upscale, output_format + ) def apply_adaptive_binarization(image_data: bytes) -> bytes: diff --git a/lib/idp_common_pkg/idp_common/utils/grid_overlay.py b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py index 9430e471..51c1c802 100644 --- a/lib/idp_common_pkg/idp_common/utils/grid_overlay.py +++ b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py @@ -137,7 +137,8 @@ def add_ruler_edges( canvas = canvas.convert("RGB") img_byte_array = io.BytesIO() - canvas.save(img_byte_array, format="JPEG", quality=95) + # Use PNG with optimization for better compression while preserving quality + canvas.save(img_byte_array, format="PNG", optimize=True) logger.info(f"Ruler edges added. New size: {new_width}x{new_height}") return img_byte_array.getvalue() @@ -291,7 +292,8 @@ def draw_bounding_boxes( result = result.convert("RGB") img_byte_array = io.BytesIO() - result.save(img_byte_array, format="JPEG", quality=95) + # Use PNG with optimization for better compression while preserving quality + result.save(img_byte_array, format="PNG", optimize=True) logger.info(f"Drew {len(bboxes)} bounding boxes on image") return img_byte_array.getvalue() diff --git a/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py b/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py new file mode 100644 index 00000000..c102ee52 --- /dev/null +++ b/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py @@ -0,0 +1,242 @@ +""" +PDF utility functions for loading and converting PDF pages to images. + +This module provides utilities for working with PDF documents in assessment +and other workflows that need to display PDF pages as images. +""" + +import io +from pathlib import Path +from typing import Optional + +try: + import fitz # PyMuPDF + + HAS_PYMUPDF = True +except ImportError: + HAS_PYMUPDF = False + +try: + from PIL import Image + + HAS_PIL = True +except ImportError: + HAS_PIL = False + + +def pdf_page_to_image( + pdf_path: str | Path, + page_number: int = 0, + max_width: int = 1200, + max_height: int = 1200, + dpi_scale: float = 1.0, +) -> bytes: + """ + Convert a PDF page to a PNG image with optional resizing. + + Args: + pdf_path: Path to the PDF file + page_number: Page number (0-based index) + max_width: Maximum width in pixels (default: 1200 for ~1MP) + max_height: Maximum height in pixels (default: 1200 for ~1MP) + dpi_scale: DPI scaling factor (default: 1.0 = 72 DPI, 2.0 = 144 DPI) + + Returns: + PNG image as bytes + + Raises: + ImportError: If PyMuPDF is not installed + FileNotFoundError: If PDF file doesn't exist + ValueError: If page number is invalid + """ + if not HAS_PYMUPDF: + raise ImportError( + "PyMuPDF (fitz) is required for PDF handling. " + "Install with: pip install PyMuPDF" + ) + + pdf_path = Path(pdf_path) + if not pdf_path.exists(): + raise FileNotFoundError(f"PDF file not found: {pdf_path}") + + # Open PDF + pdf_doc = fitz.open(str(pdf_path)) + + try: + # Validate page number + if page_number < 0 or page_number >= len(pdf_doc): + raise ValueError( + f"Invalid page number {page_number}. 
" + f"PDF has {len(pdf_doc)} pages (0-{len(pdf_doc) - 1})" + ) + + # Get the page + page = pdf_doc[page_number] + + # Render page to pixmap + mat = fitz.Matrix(dpi_scale, dpi_scale) + pix = page.get_pixmap(matrix=mat) + + # Convert to PNG bytes + png_bytes = pix.tobytes("png") + + # Always resize to ensure we stay within limits + if HAS_PIL: + png_bytes = _resize_image(png_bytes, max_width, max_height) + + return png_bytes + + finally: + pdf_doc.close() + + +def pdf_to_images( + pdf_path: str | Path, + max_width: int = 1200, + max_height: int = 1200, + dpi_scale: float = 1.0, + page_numbers: Optional[list[int]] = None, +) -> list[bytes]: + """ + Convert multiple PDF pages to PNG images. + + Args: + pdf_path: Path to the PDF file + max_width: Maximum width in pixels (default: 1200 for ~1MP) + max_height: Maximum height in pixels (default: 1200 for ~1MP) + dpi_scale: DPI scaling factor (default: 1.0 = 72 DPI) + page_numbers: List of page numbers to convert (0-based). If None, converts all pages. + + Returns: + List of PNG images as bytes, one per page + + Raises: + ImportError: If PyMuPDF is not installed + FileNotFoundError: If PDF file doesn't exist + """ + if not HAS_PYMUPDF: + raise ImportError( + "PyMuPDF (fitz) is required for PDF handling. " + "Install with: pip install PyMuPDF" + ) + + pdf_path = Path(pdf_path) + if not pdf_path.exists(): + raise FileNotFoundError(f"PDF file not found: {pdf_path}") + + pdf_doc = fitz.open(str(pdf_path)) + + try: + # Determine which pages to convert + if page_numbers is None: + page_numbers = list(range(len(pdf_doc))) + + images = [] + for page_num in page_numbers: + if page_num < 0 or page_num >= len(pdf_doc): + raise ValueError( + f"Invalid page number {page_num}. " + f"PDF has {len(pdf_doc)} pages (0-{len(pdf_doc) - 1})" + ) + + # Get the page + page = pdf_doc[page_num] + + # Render page to pixmap + mat = fitz.Matrix(dpi_scale, dpi_scale) + pix = page.get_pixmap(matrix=mat) + + # Convert to PNG bytes + png_bytes = pix.tobytes("png") + + # Always resize to ensure we stay within limits + if HAS_PIL: + png_bytes = _resize_image(png_bytes, max_width, max_height) + + images.append(png_bytes) + + return images + + finally: + pdf_doc.close() + + +def get_pdf_page_count(pdf_path: str | Path) -> int: + """ + Get the number of pages in a PDF. + + Args: + pdf_path: Path to the PDF file + + Returns: + Number of pages in the PDF + + Raises: + ImportError: If PyMuPDF is not installed + FileNotFoundError: If PDF file doesn't exist + """ + if not HAS_PYMUPDF: + raise ImportError( + "PyMuPDF (fitz) is required for PDF handling. " + "Install with: pip install PyMuPDF" + ) + + pdf_path = Path(pdf_path) + if not pdf_path.exists(): + raise FileNotFoundError(f"PDF file not found: {pdf_path}") + + pdf_doc = fitz.open(str(pdf_path)) + page_count = len(pdf_doc) + pdf_doc.close() + + return page_count + + +def _resize_image( + png_bytes: bytes, + max_width: int, + max_height: int, +) -> bytes: + """ + Resize a PNG image while maintaining aspect ratio. 
+ + Args: + png_bytes: PNG image as bytes + max_width: Maximum width in pixels + max_height: Maximum height in pixels + + Returns: + Resized PNG image as bytes + """ + if not HAS_PIL: + # If PIL not available, return original + return png_bytes + + img = Image.open(io.BytesIO(png_bytes)) + + # Resize to max dimensions while maintaining aspect ratio + img.thumbnail((max_width, max_height), Image.Resampling.LANCZOS) + + # Save as optimized PNG + buffer = io.BytesIO() + img.save(buffer, format="PNG", optimize=True) + + return buffer.getvalue() + + +def create_minimal_png() -> bytes: + """ + Create a minimal 1x1 pixel white PNG image. + + Useful as a fallback when PDF loading fails or for testing. + + Returns: + Minimal PNG image as bytes (1x1 white pixel) + """ + return ( + b"\x89PNG\r\n\x1a\n" + b"\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01" + b"\x08\x02\x00\x00\x00\x90wS\xde" + b"\x00\x00\x00\x0cIDATx\x9cc\x00\x01\x00\x00\x05\x00\x01\r\n-\xb4" + b"\x00\x00\x00\x00IEND\xaeB`\x82" + ) diff --git a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index 2be2cafe..77d3ab47 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -964,7 +964,10 @@ appsync = [ { name = "requests" }, ] assessment = [ + { name = "aws-lambda-powertools" }, + { name = "mypy-boto3-dynamodb" }, { name = "pillow" }, + { name = "strands-agents" }, ] classification = [ { name = "pillow" }, @@ -1045,6 +1048,7 @@ requires-dist = [ { name = "amazon-textract-textractor", extras = ["pandas"], marker = "extra == 'all'", specifier = "==1.9.2" }, { name = "amazon-textract-textractor", extras = ["pandas"], marker = "extra == 'ocr'", specifier = "==1.9.2" }, { name = "aws-lambda-powertools", marker = "extra == 'agentic-extraction'", specifier = ">=3.2.0" }, + { name = "aws-lambda-powertools", marker = "extra == 'assessment'", specifier = ">=3.2.0" }, { name = "aws-xray-sdk", marker = "extra == 'docs-service'", specifier = ">=2.14.0" }, { name = "bedrock-agentcore", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = ">=0.1.1" }, { name = "bedrock-agentcore", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = ">=0.1.1" }, @@ -1067,6 +1071,7 @@ requires-dist = [ { name = "moto", extras = ["s3"], marker = "extra == 'test'", specifier = "==5.1.8" }, { name = "munkres", marker = "extra == 'all'", specifier = ">=1.1.4" }, { name = "munkres", marker = "extra == 'evaluation'", specifier = ">=1.1.4" }, + { name = "mypy-boto3-dynamodb", marker = "extra == 'assessment'", specifier = ">=1.40.56" }, { name = "numpy", marker = "extra == 'all'", specifier = "==1.26.4" }, { name = "numpy", marker = "extra == 'evaluation'", specifier = "==1.26.4" }, { name = "numpy", marker = "extra == 'ocr'", specifier = "==1.26.4" }, @@ -1111,10 +1116,11 @@ requires-dist = [ { name = "s3fs", marker = "extra == 'criteria-validation'", specifier = "==2023.12.2" }, { name = "stickler-eval", marker = "extra == 'all'", specifier = "==0.1.2" }, { name = "stickler-eval", marker = "extra == 'evaluation'", specifier = "==0.1.2" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agentic-extraction'", specifier = "==1.14.0" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==1.14.0" }, - { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = "==1.14.0" }, - { name = "strands-agents", marker = "extra == 'code-intel'", specifier = "==1.14.0" }, + { name = 
"strands-agents", marker = "python_full_version >= '3.10' and extra == 'agentic-extraction'", specifier = "==1.17.0" }, + { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==1.17.0" }, + { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = "==1.17.0" }, + { name = "strands-agents", marker = "extra == 'assessment'", specifier = "==1.17.0" }, + { name = "strands-agents", marker = "extra == 'code-intel'", specifier = "==1.17.0" }, { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==0.2.13" }, { name = "strands-agents-tools", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = "==0.2.13" }, { name = "strands-agents-tools", marker = "extra == 'code-intel'", specifier = "==0.2.13" }, @@ -1852,6 +1858,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/90/ab/0301c945a704218bc9435f0e3c88884f6b19ef234d8899fb47ce1ccfd0c9/munkres-1.1.4-py2.py3-none-any.whl", hash = "sha256:6b01867d4a8480d865aea2326e4b8f7c46431e9e55b4a2e32d989307d7bced2a", size = 7015, upload-time = "2020-09-15T15:12:19.627Z" }, ] +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.40.56" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/20/b543f76de1e5744b32d9051716ce464e595765b5aadac7ac36e24363a986/mypy_boto3_dynamodb-1.40.56.tar.gz", hash = "sha256:576dd12fe1125754066e7fa480f92c123220970a9d69f7663a56d701f2978ac5", size = 47972, upload-time = "2025-10-21T20:35:02.447Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/93/2bb83680398a28cbc729e120c4ae745adf76d7a20f31499189cce2a46485/mypy_boto3_dynamodb-1.40.56-py3-none-any.whl", hash = "sha256:3bf3f541a0d21c249109dd65f18c61b3e6a0fe7124b3afe989877d5cca42b65a", size = 56996, upload-time = "2025-10-21T20:34:54.187Z" }, +] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -3089,7 +3104,7 @@ wheels = [ [[package]] name = "strands-agents" -version = "1.14.0" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3" }, @@ -3104,9 +3119,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/dd/a2dc96614bb1dd7c1623cbdf6df268eb307038b2fe27bc5a6148f4223f59/strands_agents-1.14.0.tar.gz", hash = "sha256:f86dd2b92d50196acd0c5ff5404fcd1b6c3715ae56fcceb2a78210ab47860585", size = 471216, upload-time = "2025-10-29T14:20:27.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/c4/925da2e52279e53067c1306ce1b116bd0cee334ddee751a207b05ad0e668/strands_agents-1.17.0.tar.gz", hash = "sha256:60b0006f2d60534761a468aa15916f0ec7670952ce32c829d4f91bc3a243dfec", size = 514561, upload-time = "2025-11-18T19:13:33.746Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/08/848c26d917d8f75bc10edeea63578af218df5c6efeb69e3cf91eb8fd396e/strands_agents-1.14.0-py3-none-any.whl", hash = "sha256:d2ebc1b991c37e891cfe79cf5dea1bfa021c3e6d93e4d2f238042ea688019bf3", size = 238974, upload-time = "2025-10-29T14:20:24.841Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/2d9f15af01dbd4854fbaf68bf8c41b23359878182639f3686c555189964b/strands_agents-1.17.0-py3-none-any.whl", hash = "sha256:8b8412048a3ca3568a9f7253de0d2c931e36149cee2b9ca7b823cce51d64e36d", size = 253993, upload-time = "2025-11-18T19:13:31.964Z" }, ] [[package]] diff --git a/lib/idp_common_pkg/verify_stickler.py 
b/lib/idp_common_pkg/verify_stickler.py index 16d29410..42d82917 100644 --- a/lib/idp_common_pkg/verify_stickler.py +++ b/lib/idp_common_pkg/verify_stickler.py @@ -10,7 +10,7 @@ """ import sys -from typing import Any, Dict +from typing import Any def verify_stickler_import(): @@ -65,7 +65,7 @@ def verify_dynamic_model_creation(): from stickler import StructuredModel # Test configuration - config: Dict[str, Any] = { + config: dict[str, Any] = { "model_name": "TestModel", "match_threshold": 0.8, "fields": { @@ -105,7 +105,7 @@ def verify_list_matching(): try: from stickler import StructuredModel - config: Dict[str, Any] = { + config: dict[str, Any] = { "model_name": "ListTestModel", "fields": { "items": { From 760135492c512734db49e92949bd3c1f0c879df8 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 19 Nov 2025 13:40:42 +0200 Subject: [PATCH 08/30] fix tool --- .../idp_common/assessment/strands_tools.py | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index a7b18a6f..2b6c1f30 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -12,6 +12,7 @@ from pydantic import BaseModel, Field from strands import Agent, tool from strands.types.content import ContentBlock, ImageContent +from strands.types.tools import ToolResult, ToolResultContent, ToolResultStatus from idp_common.assessment.strands_models import BoundingBox from idp_common.utils.grid_overlay import draw_bounding_boxes @@ -49,7 +50,7 @@ def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) """ @tool - def view_image(input_data: dict[str, Any], agent: Agent) -> ImageContent: + def view_image(input_data: dict[str, Any], agent: Agent) -> dict: """ View a specific page image, optionally highlighting a bounding box area. 
@@ -127,19 +128,19 @@ def view_image(input_data: dict[str, Any], agent: Agent) -> ImageContent: }, ) - # Return the image as ImageContent so the LLM can actually see it - import base64 - - image_b64 = base64.b64encode(img_bytes).decode("utf-8") - - return ImageContent( - type="image", - source={ - "type": "base64", - "media_type": "image/png", - "data": image_b64, - }, - ) + return { + "status": "success", + "content": [ + { + "image": { + "format": "png", + "source": { + "bytes": img_bytes, + }, + } + } + ], + } return view_image From eedd6a8ec6bb3e8144a96e083aad99cf93eb92ae Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 19 Nov 2025 16:54:08 +0200 Subject: [PATCH 09/30] assesment update --- .../idp_common/assessment/granular_service.py | 5 +- .../idp_common/assessment/strands_service.py | 62 +++++-------------- .../idp_common/assessment/strands_tools.py | 37 ++++++++++- .../idp_common/config/models.py | 2 +- lib/idp_common_pkg/pyproject.toml | 11 ++-- lib/idp_common_pkg/uv.lock | 11 ++++ 6 files changed, 71 insertions(+), 57 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 83d6455e..482885d6 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -14,9 +14,10 @@ import json import os import time -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any from aws_lambda_powertools import Logger +from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource from idp_common import image, metrics, s3, utils from idp_common.assessment.models import AssessmentResult, AssessmentTask @@ -31,7 +32,7 @@ X_AWS_IDP_CONFIDENCE_THRESHOLD, X_AWS_IDP_DOCUMENT_TYPE, ) -from idp_common.extraction.models import ExtractionData, ExtractionMetadata +from idp_common.extraction.models import ExtractionData from idp_common.models import Document, Status from idp_common.utils import check_token_limit from idp_common.utils.grid_overlay import add_ruler_edges diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index b3df37ca..04da4fc2 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -12,7 +12,7 @@ from aws_lambda_powertools import Logger from botocore.config import Config -from strands import Agent, tool +from strands import Agent from strands.agent.conversation_manager import SummarizingConversationManager from strands.models.bedrock import BedrockModel from strands.types.content import ContentBlock, Message @@ -20,52 +20,13 @@ from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_models import AssessmentOutput from idp_common.assessment.strands_tools import create_strands_tools +from idp_common.utils.bedrock_utils import ( + async_exponential_backoff_retry, +) logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) -def create_submit_assessment_tool(): - """ - Create a tool for submitting assessment results. - - Returns: - A Strands tool function for submitting assessments - """ - - @tool - def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: - """ - Submit your final confidence assessment. - - Use this tool when you have: - 1. Located the values in the document images - 2. 
Determined precise bounding box coordinates using ruler markings - 3. Assessed the confidence based on clarity and accuracy - - Args: - assessment: Dictionary with: - - assessments: dict mapping attribute names to ConfidenceAssessment - - alerts: list of any threshold alerts (optional) - - Returns: - Success confirmation message or validation error details - """ - # Validate assessment structure and return helpful errors - validated_assessment = AssessmentOutput(**assessment) # pyright: ignore[reportCallIssue] - - # Store in agent state - agent.state.set("assessment_output", validated_assessment.model_dump()) - - logger.info( - "Assessment submitted successfully", - extra={"assessment": validated_assessment.model_dump()}, - ) - - return "Assessment submitted successfully. You can now finish the task." - - return submit_assessment - - async def assess_attribute_with_strands( task: AssessmentTask, extraction_results: dict[str, Any], @@ -105,9 +66,7 @@ async def assess_attribute_with_strands( try: # 1. Create tools (image viewer + todo list + submit assessment) base_tools = create_strands_tools(page_images, sorted_page_ids) - submit_tool = create_submit_assessment_tool() - tools = base_tools + [submit_tool] - + tools = base_tools # 2. Build enhanced system prompt with schema and extraction (for caching) enhanced_system_prompt = _build_system_prompt_with_context( system_prompt, document_schema, extraction_results, len(page_images) @@ -161,8 +120,17 @@ async def assess_attribute_with_strands( }, ) - response = await agent.invoke_async([user_message]) + @async_exponential_backoff_retry( + max_retries=30, + initial_delay=5, + exponential_base=2, + jitter=0.5, + max_delay=900, + ) + async def invoke_agent_with_retry(): + return await agent.invoke_async([user_message]) + response = await invoke_agent_with_retry() logger.debug("Agent response received", extra={"task_id": task.task_id}) # 7. Extract assessment from agent state diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index 2b6c1f30..44379aa5 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -11,10 +11,8 @@ from aws_lambda_powertools import Logger from pydantic import BaseModel, Field from strands import Agent, tool -from strands.types.content import ContentBlock, ImageContent -from strands.types.tools import ToolResult, ToolResultContent, ToolResultStatus -from idp_common.assessment.strands_models import BoundingBox +from idp_common.assessment.strands_models import AssessmentOutput, BoundingBox from idp_common.utils.grid_overlay import draw_bounding_boxes from idp_common.utils.strands_agent_tools.todo_list import ( create_todo_list, @@ -37,6 +35,38 @@ class ViewImageInput(BaseModel): label: str | None = Field(None, description="Optional label for the bounding box") +@tool +def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: + """ + Submit your final confidence assessment. + + Use this tool when you have: + 1. Located the values in the document images + 2. Determined precise bounding box coordinates using ruler markings + 3. 
Assessed the confidence based on clarity and accuracy + + Args: + assessment: Dictionary with: + - assessments: dict mapping attribute names to ConfidenceAssessment + - alerts: list of any threshold alerts (optional) + + Returns: + Success confirmation message or validation error details + """ + # Validate assessment structure and return helpful errors + validated_assessment = AssessmentOutput(**assessment) # pyright: ignore[reportCallIssue] + + # Store in agent state + agent.state.set("assessment_output", validated_assessment.model_dump()) + + logger.info( + "Assessment submitted successfully", + extra={"assessment": validated_assessment.model_dump()}, + ) + + return "Assessment submitted successfully. You can now finish the task." + + def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) -> Any: """ Create a view_image tool that has access to page images. @@ -163,4 +193,5 @@ def create_strands_tools( create_todo_list, update_todo, view_todo_list, + submit_assessment, ] diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index ae558ce4..04f97632 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -317,7 +317,7 @@ class AssessmentConfig(BaseModel): image: ImageConfig = Field(default_factory=ImageConfig) # Granular assessment settings (always enabled, no longer nested) max_workers: int = Field( - default=20, gt=0, description="Max concurrent workers for parallel assessment" + default=50, gt=0, description="Max concurrent workers for parallel assessment" ) @field_validator( diff --git a/lib/idp_common_pkg/pyproject.toml b/lib/idp_common_pkg/pyproject.toml index 14e933fd..5d2dbd2a 100644 --- a/lib/idp_common_pkg/pyproject.toml +++ b/lib/idp_common_pkg/pyproject.toml @@ -25,6 +25,7 @@ dependencies = [ "jsonschema>=4.25.1", "pydantic>=2.12.0", "deepdiff>=6.0.0", + "mypy-boto3-bedrock-runtime>=1.40.76", ] readme = "README.md" classifiers = [ @@ -73,6 +74,8 @@ extraction = [ assessment = [ "Pillow==11.2.1", # For image handling "aws-lambda-powertools>=3.2.0", # Structured logging and observability + "mypy-boto3-dynamodb>=1.40.56", + "strands-agents==1.17.0", ] # Evaluation module dependencies @@ -98,7 +101,7 @@ appsync = ["requests==2.32.4"] # Agents module dependencies agents = [ - "strands-agents==1.14.0; python_version>='3.10'", + "strands-agents==1.17.0; python_version>='3.10'", "strands-agents-tools==0.2.13; python_version>='3.10'", "bedrock-agentcore>=0.1.1; python_version>='3.10'", # Specifically for the code interpreter tool ] @@ -106,7 +109,7 @@ agents = [ # Code intelligence module dependencies code_intel = [ "requests==2.32.4", - "strands-agents==1.14.0", + "strands-agents==1.17.0", "strands-agents-tools==0.2.13", "bedrock-agentcore>=0.1.1", "PyYAML>=6.0.0", @@ -164,14 +167,14 @@ all = [ "pyarrow==20.0.0", "openpyxl==3.1.5", "python-docx==1.2.0", - "strands-agents==1.14.0; python_version>='3.10'", + "strands-agents==1.17.0; python_version>='3.10'", "strands-agents-tools==0.2.13; python_version>='3.10'", "bedrock-agentcore>=0.1.1; python_version>='3.10'", # "s3fs==2023.12.2" - - disabled till we fix package dependencies ] agentic-extraction = [ "jsonpatch==1.33", - "strands-agents==1.14.0 ; python_full_version >= '3.10'", + "strands-agents==1.17.0 ; python_full_version >= '3.10'", "pandas>=2.2.3", "pymupdf==1.25.5", # Pinned to 1.25.5 - has pre-built ARM64 wheels, 1.26.x requires compilation "email-validator>=2.3.0", diff --git 
a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index 77d3ab47..62e1f082 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -924,6 +924,7 @@ dependencies = [ { name = "boto3" }, { name = "deepdiff" }, { name = "jsonschema" }, + { name = "mypy-boto3-bedrock-runtime" }, { name = "pydantic" }, ] @@ -1071,6 +1072,7 @@ requires-dist = [ { name = "moto", extras = ["s3"], marker = "extra == 'test'", specifier = "==5.1.8" }, { name = "munkres", marker = "extra == 'all'", specifier = ">=1.1.4" }, { name = "munkres", marker = "extra == 'evaluation'", specifier = ">=1.1.4" }, + { name = "mypy-boto3-bedrock-runtime", specifier = ">=1.40.76" }, { name = "mypy-boto3-dynamodb", marker = "extra == 'assessment'", specifier = ">=1.40.56" }, { name = "numpy", marker = "extra == 'all'", specifier = "==1.26.4" }, { name = "numpy", marker = "extra == 'evaluation'", specifier = "==1.26.4" }, @@ -1858,6 +1860,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/90/ab/0301c945a704218bc9435f0e3c88884f6b19ef234d8899fb47ce1ccfd0c9/munkres-1.1.4-py2.py3-none-any.whl", hash = "sha256:6b01867d4a8480d865aea2326e4b8f7c46431e9e55b4a2e32d989307d7bced2a", size = 7015, upload-time = "2020-09-15T15:12:19.627Z" }, ] +[[package]] +name = "mypy-boto3-bedrock-runtime" +version = "1.40.76" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/db/cc668a48a27973df31c7044a6785bd0e8691b1a0419dae001c4c29f1c98f/mypy_boto3_bedrock_runtime-1.40.76.tar.gz", hash = "sha256:52f2a2b3955eb9f4f0d075398f2d430abcc6bf56ff00815b94e3371e66030059", size = 28428, upload-time = "2025-11-18T21:42:43.41Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/6f/8b04729224a76952e08406eccbbbebfa75ee7df91313279d76428f13fdc2/mypy_boto3_bedrock_runtime-1.40.76-py3-none-any.whl", hash = "sha256:0347f6d78e342d640da74bbd6158b276c5cb39ef73405084a65fe490766b6dab", size = 34454, upload-time = "2025-11-18T21:42:42.156Z" }, +] + [[package]] name = "mypy-boto3-dynamodb" version = "1.40.56" From 6a6d610c383a1b854510372f3b489f3506407e75 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Fri, 21 Nov 2025 15:55:53 +0000 Subject: [PATCH 10/30] further streamlining --- .../bank-statement-sample/config.yaml | 111 --- .../lending-package-sample/config.yaml | 110 --- .../config.yaml | 112 +-- .../rvl-cdip-package-sample/config.yaml | 112 +-- .../idp_common/assessment/models.py | 69 +- .../idp_common/assessment/service.py | 852 ++++++++++-------- .../idp_common/assessment/strands_service.py | 383 ++++---- .../idp_common/bedrock/__init__.py | 16 +- .../idp_common/bedrock/model_config.py | 178 ++++ .../idp_common/config/models.py | 29 +- .../idp_common/extraction/agentic_idp.py | 39 +- lib/idp_common_pkg/uv.lock | 10 +- 12 files changed, 969 insertions(+), 1052 deletions(-) create mode 100644 lib/idp_common_pkg/idp_common/bedrock/model_config.py diff --git a/config_library/pattern-2/bank-statement-sample/config.yaml b/config_library/pattern-2/bank-statement-sample/config.yaml index d345dca4..9a9ef807 100644 --- a/config_library/pattern-2/bank-statement-sample/config.yaml +++ b/config_library/pattern-2/bank-statement-sample/config.yaml @@ -452,117 +452,6 @@ assessment: - Provide tight, accurate bounding boxes around the actual text - - For each field, provide bounding box coordinates: - - bbox: [x1, y1, x2, y2] coordinates in normalized 0-1000 scale - - page: Page number where the field appears (starting from 1) - - Coordinate system: - - The document images 
have ruler markings along all edges showing the 0-1000 coordinate scale - - Use these ruler markings to determine precise coordinates for bounding boxes - - Use normalized scale 0-1000 for both x and y axes - - x1, y1 = top-left corner of bounding box - - x2, y2 = bottom-right corner of bounding box - - Ensure x2 > x1 and y2 > y1 - - Make bounding boxes tight around the actual text content - - If a field spans multiple lines, create a bounding box that encompasses all relevant text - - Reference the ruler markings on the image edges to provide accurate coordinates - - - - Analyze the extraction results against the source document and provide confidence assessments with spatial localization. Return a JSON object with the following structure based on the attribute type: - - For SIMPLE attributes: - { - "simple_attribute_name": { - "confidence": 0.85, - "bbox": [100, 200, 300, 250], - "page": 1 - } - } - - For GROUP attributes (nested object structure): - { - "group_attribute_name": { - "sub_attribute_1": { - "confidence": 0.90, - "bbox": [150, 300, 250, 320], - "page": 1 - }, - "sub_attribute_2": { - "confidence": 0.75, - "bbox": [150, 325, 280, 345], - "page": 1 - } - } - } - - For LIST attributes (array of assessed items): - { - "list_attribute_name": [ - { - "item_attribute_1": { - "confidence": 0.95, - "bbox": [100, 400, 200, 420], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.88, - "bbox": [250, 400, 350, 420], - "page": 1 - } - }, - { - "item_attribute_1": { - "confidence": 0.92, - "bbox": [100, 425, 200, 445], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.70, - "bbox": [250, 425, 350, 445], - "page": 1 - } - } - ] - } - - IMPORTANT: - - The contains raw JSON schema for the attributes you should assess - - The contains the FULL extraction data (you have complete context for cross-referencing) - - The specifies which attributes to focus on - - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations - - Include assessments AND bounding boxes for ALL attributes present in the extraction results - - Match the exact structure of the extracted data - - Provide page numbers for all bounding boxes (starting from 1) - - - <> - - - {DOCUMENT_IMAGE} - - - - {OCR_TEXT_CONFIDENCE} - - - <> - - - {TASK_SCHEMA} - - - - {TASK_INSTRUCTION} - - - - {EXTRACTION_RESULTS} - - evaluation: enabled: true llm_method: diff --git a/config_library/pattern-2/lending-package-sample/config.yaml b/config_library/pattern-2/lending-package-sample/config.yaml index 92928e1b..cd0646ac 100644 --- a/config_library/pattern-2/lending-package-sample/config.yaml +++ b/config_library/pattern-2/lending-package-sample/config.yaml @@ -1478,116 +1478,6 @@ assessment: - Provide tight, accurate bounding boxes around the actual text - - For each field, provide bounding box coordinates: - - bbox: [x1, y1, x2, y2] coordinates in normalized 0-1000 scale - - page: Page number where the field appears (starting from 1) - - Coordinate system: - - The document images have ruler markings along all edges showing the 0-1000 coordinate scale - - Use these ruler markings to determine precise coordinates for bounding boxes - - Use normalized scale 0-1000 for both x and y axes - - x1, y1 = top-left corner of bounding box - - x2, y2 = 
bottom-right corner of bounding box - - Ensure x2 > x1 and y2 > y1 - - Make bounding boxes tight around the actual text content - - If a field spans multiple lines, create a bounding box that encompasses all relevant text - - Reference the ruler markings on the image edges to provide accurate coordinates - - - - Analyze the extraction results against the source document and provide confidence assessments with spatial localization. Return a JSON object with the following structure based on the attribute type: - - For SIMPLE attributes: - { - "simple_attribute_name": { - "confidence": 0.85, - "bbox": [100, 200, 300, 250], - "page": 1 - } - } - - For GROUP attributes (nested object structure): - { - "group_attribute_name": { - "sub_attribute_1": { - "confidence": 0.90, - "bbox": [150, 300, 250, 320], - "page": 1 - }, - "sub_attribute_2": { - "confidence": 0.75, - "bbox": [150, 325, 280, 345], - "page": 1 - } - } - } - - For LIST attributes (array of assessed items): - { - "list_attribute_name": [ - { - "item_attribute_1": { - "confidence": 0.95, - "bbox": [100, 400, 200, 420], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.88, - "bbox": [250, 400, 350, 420], - "page": 1 - } - }, - { - "item_attribute_1": { - "confidence": 0.92, - "bbox": [100, 425, 200, 445], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.70, - "bbox": [250, 425, 350, 445], - "page": 1 - } - } - ] - } - - IMPORTANT: - - The contains raw JSON schema for the attributes you should assess - - The contains the FULL extraction data (you have complete context for cross-referencing) - - The specifies which attributes to focus on - - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations - - Include assessments AND bounding boxes for ALL attributes present in the extraction results - - Match the exact structure of the extracted data - - Provide page numbers for all bounding boxes (starting from 1) - - - <> - - - {DOCUMENT_IMAGE} - - - - {OCR_TEXT_CONFIDENCE} - - - <> - - - {TASK_SCHEMA} - - - - {TASK_INSTRUCTION} - - - - {EXTRACTION_RESULTS} - evaluation: enabled: true llm_method: diff --git a/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml b/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml index 5009d75a..31ed72d7 100644 --- a/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml +++ b/config_library/pattern-2/rvl-cdip-package-sample-with-few-shot-examples/config.yaml @@ -1026,116 +1026,6 @@ assessment: - Provide tight, accurate bounding boxes around the actual text - - For each field, provide bounding box coordinates: - - bbox: [x1, y1, x2, y2] coordinates in normalized 0-1000 scale - - page: Page number where the field appears (starting from 1) - - Coordinate system: - - The document images have ruler markings along all edges showing the 0-1000 coordinate scale - - Use these ruler markings to determine precise coordinates for bounding boxes - - Use normalized scale 0-1000 for both x and y axes - - x1, y1 = top-left corner of bounding box - - x2, y2 = bottom-right corner of bounding box - - Ensure x2 > x1 and y2 > y1 - - Make bounding boxes tight around the actual text content - - If a field spans multiple lines, create a bounding 
box that encompasses all relevant text - - Reference the ruler markings on the image edges to provide accurate coordinates - - - - Analyze the extraction results against the source document and provide confidence assessments with spatial localization. Return a JSON object with the following structure based on the attribute type: - - For SIMPLE attributes: - { - "simple_attribute_name": { - "confidence": 0.85, - "bbox": [100, 200, 300, 250], - "page": 1 - } - } - - For GROUP attributes (nested object structure): - { - "group_attribute_name": { - "sub_attribute_1": { - "confidence": 0.90, - "bbox": [150, 300, 250, 320], - "page": 1 - }, - "sub_attribute_2": { - "confidence": 0.75, - "bbox": [150, 325, 280, 345], - "page": 1 - } - } - } - - For LIST attributes (array of assessed items): - { - "list_attribute_name": [ - { - "item_attribute_1": { - "confidence": 0.95, - "bbox": [100, 400, 200, 420], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.88, - "bbox": [250, 400, 350, 420], - "page": 1 - } - }, - { - "item_attribute_1": { - "confidence": 0.92, - "bbox": [100, 425, 200, 445], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.70, - "bbox": [250, 425, 350, 445], - "page": 1 - } - } - ] - } - - IMPORTANT: - - The contains raw JSON schema for the attributes you should assess - - The contains the FULL extraction data (you have complete context for cross-referencing) - - The specifies which attributes to focus on - - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations - - Include assessments AND bounding boxes for ALL attributes present in the extraction results - - Match the exact structure of the extracted data - - Provide page numbers for all bounding boxes (starting from 1) - - - <> - - - {DOCUMENT_IMAGE} - - - - {OCR_TEXT_CONFIDENCE} - - - <> - - - {TASK_SCHEMA} - - - - {TASK_INSTRUCTION} - - - - {EXTRACTION_RESULTS} - evaluation: enabled: true llm_method: @@ -1452,7 +1342,7 @@ agents: parameters: max_log_events: 5 time_range_hours_default: 24 - + chat_companion: model_id: us.anthropic.claude-haiku-4-5-20251001-v1:0 pricing: diff --git a/config_library/pattern-2/rvl-cdip-package-sample/config.yaml b/config_library/pattern-2/rvl-cdip-package-sample/config.yaml index f7720b03..05bde818 100644 --- a/config_library/pattern-2/rvl-cdip-package-sample/config.yaml +++ b/config_library/pattern-2/rvl-cdip-package-sample/config.yaml @@ -1121,116 +1121,6 @@ assessment: - Provide tight, accurate bounding boxes around the actual text - - For each field, provide bounding box coordinates: - - bbox: [x1, y1, x2, y2] coordinates in normalized 0-1000 scale - - page: Page number where the field appears (starting from 1) - - Coordinate system: - - The document images have ruler markings along all edges showing the 0-1000 coordinate scale - - Use these ruler markings to determine precise coordinates for bounding boxes - - Use normalized scale 0-1000 for both x and y axes - - x1, y1 = top-left corner of bounding box - - x2, y2 = bottom-right corner of bounding box - - Ensure x2 > x1 and y2 > y1 - - Make bounding boxes tight around the actual text content - - If a field spans multiple lines, create a bounding box that encompasses all relevant text - - Reference the ruler markings on the image edges to provide 
accurate coordinates - - - - Analyze the extraction results against the source document and provide confidence assessments with spatial localization. Return a JSON object with the following structure based on the attribute type: - - For SIMPLE attributes: - { - "simple_attribute_name": { - "confidence": 0.85, - "bbox": [100, 200, 300, 250], - "page": 1 - } - } - - For GROUP attributes (nested object structure): - { - "group_attribute_name": { - "sub_attribute_1": { - "confidence": 0.90, - "bbox": [150, 300, 250, 320], - "page": 1 - }, - "sub_attribute_2": { - "confidence": 0.75, - "bbox": [150, 325, 280, 345], - "page": 1 - } - } - } - - For LIST attributes (array of assessed items): - { - "list_attribute_name": [ - { - "item_attribute_1": { - "confidence": 0.95, - "bbox": [100, 400, 200, 420], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.88, - "bbox": [250, 400, 350, 420], - "page": 1 - } - }, - { - "item_attribute_1": { - "confidence": 0.92, - "bbox": [100, 425, 200, 445], - "page": 1 - }, - "item_attribute_2": { - "confidence": 0.70, - "bbox": [250, 425, 350, 445], - "page": 1 - } - } - ] - } - - IMPORTANT: - - The contains raw JSON schema for the attributes you should assess - - The contains the FULL extraction data (you have complete context for cross-referencing) - - The specifies which attributes to focus on - - For LIST attributes like "Transactions", assess EACH individual item in the list separately with individual bounding boxes - - Each transaction should be assessed as a separate object in the array with its own spatial coordinates - - Do NOT provide aggregate assessments for list items - assess each one individually with precise locations - - Include assessments AND bounding boxes for ALL attributes present in the extraction results - - Match the exact structure of the extracted data - - Provide page numbers for all bounding boxes (starting from 1) - - - <> - - - {DOCUMENT_IMAGE} - - - - {OCR_TEXT_CONFIDENCE} - - - <> - - - {TASK_SCHEMA} - - - - {TASK_INSTRUCTION} - - - - {EXTRACTION_RESULTS} - evaluation: enabled: true llm_method: @@ -1463,7 +1353,7 @@ agents: parameters: max_log_events: 5 time_range_hours_default: 24 - + chat_companion: model_id: us.anthropic.claude-haiku-4-5-20251001-v1:0 pricing: diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index f76361c3..909a6ae3 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -10,7 +10,7 @@ from typing import Any -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, field_validator class AttributeAssessment(BaseModel): @@ -93,3 +93,70 @@ class AssessmentResult(BaseModel): error_message: str | None = None processing_time: float = 0.0 metering: dict[str, Any] | None = None + + +# ============================================================================ +# Models for assessment service.py (data flow and processing) +# ============================================================================ + + +class Geometry(BaseModel): + """Geometry in IDP format (converted from BoundingBox).""" + + boundingBox: dict[str, float] # {top, left, width, height} + page: int + + +class ConfidenceAlert(BaseModel): + """Alert for confidence threshold violation.""" + + attribute_name: str + confidence: float + confidence_threshold: float + + @field_validator("confidence", "confidence_threshold", mode="before") + @classmethod + def parse_float(cls, v: Any) -> float: + 
"""Parse float from string or number, handle None.""" + if v is None: + return 0.0 + if isinstance(v, (int, float)): + return float(v) + if isinstance(v, str): + if not v.strip(): + return 0.0 + try: + return float(v) + except (ValueError, TypeError): + return 0.0 + # Fallback for other types + try: + return float(v) + except (ValueError, TypeError): + return 0.0 + + +class DocumentContent(BaseModel): + """Loaded content from document pages.""" + + document_text: str + page_images: list[Any] + ocr_text_confidence: str + + +class ExtractionData(BaseModel): + """Loaded extraction data from S3.""" + + extraction_results: dict[str, Any] # The inference_result dict + full_data: dict[str, Any] # Complete data including metadata + + +class AssessmentProcessingResult(BaseModel): + """Result of processing assessment data.""" + + enhanced_assessment_data: dict[str, Any] + confidence_alerts: list[ConfidenceAlert] + metering: dict[str, Any] + processing_metadata: dict[ + str, Any + ] # Contains assessment_time_seconds, parsing_succeeded, etc. diff --git a/lib/idp_common_pkg/idp_common/assessment/service.py b/lib/idp_common_pkg/idp_common/assessment/service.py index b8949c78..4ffe976a 100644 --- a/lib/idp_common_pkg/idp_common/assessment/service.py +++ b/lib/idp_common_pkg/idp_common/assessment/service.py @@ -17,9 +17,15 @@ import logging import os import time -from typing import Any, Dict, List, Union +from typing import Any from idp_common import bedrock, image, metrics, s3, utils +from idp_common.assessment.models import ( + ConfidenceAlert, + DocumentContent, + ExtractionData, +) +from idp_common.bedrock import format_prompt from idp_common.config.models import IDPConfig from idp_common.config.schema_constants import ( SCHEMA_DESCRIPTION, @@ -34,57 +40,37 @@ X_AWS_IDP_LIST_ITEM_DESCRIPTION, ) from idp_common.models import Document +from idp_common.ocr.service import OcrService from idp_common.utils import extract_json_from_text logger = logging.getLogger(__name__) -def _safe_float_conversion(value: Any, default: float = 0.0) -> float: +class AssessmentService: """ - Safely convert a value to float, handling strings and None values. - - Args: - value: Value to convert to float - default: Default value if conversion fails - - Returns: - Float value or default if conversion fails + Service for assessing extraction result confidence using LLMs. + + This service evaluates extraction results by analyzing them against source documents, + providing confidence scores and optional bounding box information for each extracted field. + + The class is organized into the following sections: + 1. INITIALIZATION - Setup and configuration + 2. SCHEMA & CONFIGURATION - Schema lookup and property formatting + 3. CONFIDENCE THRESHOLD HANDLING - Threshold validation and alert generation + 4. PROMPT BUILDING - Template processing and content preparation + 5. DATA LOADING - Loading document content and OCR data + 6. GEOMETRY PROCESSING - Bounding box conversion and validation + 7. 
PUBLIC API - Main entry points for assessment """ - if value is None: - return default - - if isinstance(value, (int, float)): - return float(value) - - if isinstance(value, str): - # Handle empty strings - if not value.strip(): - return default - try: - return float(value) - except (ValueError, TypeError): - logger.warning( - f"Could not convert string '{value}' to float, using default {default}" - ) - return default - - # Handle other types by attempting conversion - try: - return float(value) - except (ValueError, TypeError): - logger.warning( - f"Could not convert {type(value)} '{value}' to float, using default {default}" - ) - return default - -class AssessmentService: - """Service for assessing extraction result confidence using LLMs.""" + # ============================================================================ + # INITIALIZATION + # ============================================================================ def __init__( self, region: str | None = None, - config: Union[Dict[str, Any], IDPConfig, None] = None, + config: dict[str, Any] | IDPConfig | None = None, ): """ Initialize the assessment service. @@ -117,7 +103,11 @@ def __init__( model_id = self.config.assessment.model logger.info(f"Initialized assessment service with model {model_id}") - def _get_class_schema(self, class_label: str) -> Dict[str, Any]: + # ============================================================================ + # SCHEMA & CONFIGURATION + # ============================================================================ + + def _get_class_schema(self, class_label: str) -> dict[str, Any]: """ Get JSON Schema for a specific document class. @@ -134,7 +124,7 @@ def _get_class_schema(self, class_label: str) -> Dict[str, Any]: return schema return {} - def _format_property_descriptions(self, schema: Dict[str, Any]) -> str: + def _format_property_descriptions(self, schema: dict[str, Any]) -> str: """ Format property descriptions from JSON Schema for the prompt. @@ -178,9 +168,13 @@ def _format_property_descriptions(self, schema: Dict[str, Any]) -> str: return "\n".join(formatted_lines) + # ============================================================================ + # CONFIDENCE THRESHOLD HANDLING + # ============================================================================ + def _enhance_dict_assessment( - self, assessment_dict: Dict[str, Any], threshold: float - ) -> Dict[str, Any]: + self, assessment_dict: dict[str, Any], threshold: float + ) -> dict[str, Any]: """ Enhance an assessment dictionary by adding confidence thresholds to confidence assessments. @@ -228,12 +222,31 @@ def _enhance_dict_assessment( enhanced[key] = value return enhanced + def _get_confidence_threshold( + self, prop_schema: dict[str, Any], default: float + ) -> float: + """ + Get confidence threshold from property schema with validation. 
+ + Args: + prop_schema: Property schema dictionary + default: Default threshold if not specified in schema + + Returns: + Validated float threshold value + """ + value = prop_schema.get(X_AWS_IDP_CONFIDENCE_THRESHOLD, default) + # Use ConfidenceAlert's validator to parse the float safely + return ConfidenceAlert( + attribute_name="", confidence=0.0, confidence_threshold=value + ).confidence_threshold + def _check_confidence_alerts( self, - assessment_data: Dict[str, Any], + assessment_data: dict[str, Any], attr_name: str, threshold: float, - alerts_list: List[Dict[str, Any]], + alerts_list: list[ConfidenceAlert], ) -> None: """ Check assessment data for confidence threshold violations and add alerts. @@ -252,48 +265,41 @@ def _check_confidence_alerts( ) return - # Safety check: ensure threshold is a valid float - safe_threshold = _safe_float_conversion(threshold, 0.9) - # First check if this assessment_data itself is a direct confidence assessment if "confidence" in assessment_data: - confidence = _safe_float_conversion( - assessment_data.get("confidence", 0.0), 0.0 + alert = ConfidenceAlert( + attribute_name=attr_name, + confidence=assessment_data.get("confidence", 0.0), + confidence_threshold=threshold, ) - if confidence < safe_threshold: - alerts_list.append( - { - "attribute_name": attr_name, - "confidence": confidence, - "confidence_threshold": safe_threshold, - } - ) + if alert.confidence < alert.confidence_threshold: + alerts_list.append(alert) # Then check for nested sub-attributes (for group/complex attributes) for sub_attr_name, sub_assessment in assessment_data.items(): if isinstance(sub_assessment, dict) and "confidence" in sub_assessment: - confidence = _safe_float_conversion( - sub_assessment.get("confidence", 0.0), 0.0 + full_attr_name = ( + f"{attr_name}.{sub_attr_name}" + if "." not in attr_name + else f"{attr_name}.{sub_attr_name}" ) - if confidence < safe_threshold: - full_attr_name = ( - f"{attr_name}.{sub_attr_name}" - if "." not in attr_name - else f"{attr_name}.{sub_attr_name}" - ) - alerts_list.append( - { - "attribute_name": full_attr_name, - "confidence": confidence, - "confidence_threshold": safe_threshold, - } - ) + alert = ConfidenceAlert( + attribute_name=full_attr_name, + confidence=sub_assessment.get("confidence", 0.0), + confidence_threshold=threshold, + ) + if alert.confidence < alert.confidence_threshold: + alerts_list.append(alert) + + # ============================================================================ + # PROMPT BUILDING + # ============================================================================ def _prepare_prompt_from_template( self, prompt_template: str, - substitutions: Dict[str, str], - required_placeholders: List[str] = None, + substitutions: dict[str, str], + required_placeholders: list[str] | None = None, ) -> str: """ Prepare prompt from template by replacing placeholders with values. @@ -309,8 +315,6 @@ def _prepare_prompt_from_template( Raises: ValueError: If a required placeholder is missing from the template """ - from idp_common.bedrock import format_prompt - return format_prompt(prompt_template, substitutions, required_placeholders) def _build_content_with_or_without_image_placeholder( @@ -322,7 +326,7 @@ def _build_content_with_or_without_image_placeholder( extraction_results: str, ocr_text_confidence: str = "", image_content: Any = None, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """ Build content array, automatically deciding whether to use image placeholder processing. 
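# --- Illustrative sketch, not part of the patch: the new
# _get_confidence_threshold helper above reuses ConfidenceAlert's
# mode="before" field_validator to coerce whatever the schema supplies
# (YAML strings, numbers, or nothing at all) into a float before any
# comparison. The attribute name below is hypothetical.
from idp_common.assessment.models import ConfidenceAlert

parsed = ConfidenceAlert(
    attribute_name="AccountNumber",
    confidence="0.85",          # numeric string from the LLM payload -> 0.85
    confidence_threshold=None,  # missing threshold -> 0.0
)
assert parsed.confidence == 0.85
assert parsed.confidence_threshold == 0.0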
@@ -368,7 +372,7 @@ def _build_content_with_image_placeholder( extraction_results: str, ocr_text_confidence: str, image_content: Any = None, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """ Build content array with image inserted at DOCUMENT_IMAGE placeholder if present. @@ -453,7 +457,7 @@ def _build_content_without_image_placeholder( extraction_results: str, ocr_text_confidence: str, image_content: Any = None, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """ Build content array without DOCUMENT_IMAGE placeholder (text-only processing). @@ -485,6 +489,101 @@ def _build_content_without_image_placeholder( # Return text content only - no images unless DOCUMENT_IMAGE placeholder is used return [{"text": task_prompt}] + # ============================================================================ + # DATA LOADING + # ============================================================================ + + def _load_extraction_data(self, section) -> ExtractionData: + """ + Load extraction results from S3. + + Args: + section: Section object containing extraction_result_uri + + Returns: + ExtractionData with extraction_results and full_data + + Raises: + ValueError: If no extraction results found + """ + extraction_data = s3.get_json_content(section.extraction_result_uri) + extraction_results = extraction_data.get("inference_result", {}) + + if not extraction_results: + raise ValueError( + f"No extraction results found for section {section.section_id}" + ) + + return ExtractionData( + extraction_results=extraction_results, full_data=extraction_data + ) + + def _load_document_content(self, document: Document, section) -> DocumentContent: + """ + Load document text, images, and OCR confidence for all pages in section. + + Args: + document: Document object containing pages + section: Section object with page_ids + + Returns: + DocumentContent with document_text, page_images, and ocr_text_confidence + """ + # Sort pages by page number + sorted_page_ids = sorted(section.page_ids, key=int) + + # Read document text from all pages in order + document_texts = [] + for page_id in sorted_page_ids: + if page_id not in document.pages: + logger.warning(f"Page {page_id} not found in document") + continue + + page = document.pages[page_id] + text_path = page.parsed_text_uri + if text_path: + page_text = s3.get_text_content(text_path) + document_texts.append(page_text) + + document_text = "\n".join(document_texts) + + # Read page images with configurable dimensions + target_width = self.config.assessment.image.target_width + target_height = self.config.assessment.image.target_height + + page_images = [] + for page_id in sorted_page_ids: + if page_id not in document.pages: + continue + + page = document.pages[page_id] + image_uri = page.image_uri + if image_uri: + image_content = image.prepare_image( + image_uri, target_width, target_height + ) + page_images.append(image_content) + + # Read text confidence data for confidence information + ocr_text_confidence = "" + for page_id in sorted_page_ids: + if page_id not in document.pages: + continue + + page = document.pages[page_id] + text_confidence_data_str = self._get_text_confidence_data(page) + if text_confidence_data_str: + ocr_text_confidence += ( + f"\n--- Page {page_id} Text Confidence Data ---\n" + ) + ocr_text_confidence += text_confidence_data_str + + return DocumentContent( + document_text=document_text, + page_images=page_images, + ocr_text_confidence=ocr_text_confidence, + ) + def _get_text_confidence_data(self, page) -> str: """ Get 
text confidence data for a page from pre-generated text confidence files. @@ -508,8 +607,6 @@ def _get_text_confidence_data(self, page) -> str: # Fallback: use raw OCR data if text confidence is not available (for backward compatibility) if page.raw_text_uri: try: - from idp_common.ocr.service import OcrService - ocr_service = OcrService() raw_ocr_data = s3.get_json_content(page.raw_text_uri) text_confidence_data = ocr_service._generate_text_confidence_data( @@ -523,9 +620,13 @@ def _get_text_confidence_data(self, page) -> str: return "" + # ============================================================================ + # GEOMETRY PROCESSING + # ============================================================================ + def _convert_bbox_to_geometry( - self, bbox_coords: List[float], page_num: int - ) -> Dict[str, Any]: + self, bbox_coords: list[float], page_num: int + ) -> dict[str, Any]: """ Convert [x1,y1,x2,y2] coordinates to geometry format. @@ -557,8 +658,8 @@ def _convert_bbox_to_geometry( } def _process_single_assessment_geometry( - self, attr_assessment: Dict[str, Any], attr_name: str = "" - ) -> Dict[str, Any]: + self, attr_assessment: dict[str, Any], attr_name: str = "" + ) -> dict[str, Any]: """ Process geometry data for a single assessment (with confidence key). @@ -615,8 +716,8 @@ def _process_single_assessment_geometry( return enhanced_attr def _extract_geometry_from_assessment( - self, assessment_data: Dict[str, Any] - ) -> Dict[str, Any]: + self, assessment_data: dict[str, Any] + ) -> dict[str, Any]: """ Extract geometry data from assessment response and convert to proper format. Now supports recursive processing of nested group attributes. @@ -666,32 +767,225 @@ def _extract_geometry_from_assessment( return enhanced_assessment - def process_document_section(self, document: Document, section_id: str) -> Document: + # ============================================================================ + # RESULT PROCESSING + # ============================================================================ + + def _process_assessment_response( + self, + assessment_text: str, + extraction_results: dict[str, Any], + class_schema: dict[str, Any], + ) -> tuple[dict[str, Any], list[ConfidenceAlert], bool]: """ - Process a single section from a Document object to assess extraction confidence. + Process raw assessment response from LLM. 
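# --- Illustrative sketch, not part of the patch: the shape that
# _convert_bbox_to_geometry above produces. The hunk elides the
# arithmetic, so the division by 1000 is an assumption based on the
# prompts' 0-1000 coordinate scale and the Geometry model's normalised
# boundingBox ({top, left, width, height} in 0.0-1.0).
def bbox_to_geometry_sketch(bbox_coords: list[float], page_num: int) -> dict:
    x1, y1, x2, y2 = bbox_coords
    return {
        "boundingBox": {
            "top": y1 / 1000.0,
            "left": x1 / 1000.0,
            "width": (x2 - x1) / 1000.0,
            "height": (y2 - y1) / 1000.0,
        },
        "page": page_num,
    }

# e.g. [100, 200, 300, 250] on page 1 ->
# {"boundingBox": {"top": 0.2, "left": 0.1, "width": 0.2, "height": 0.05}, "page": 1}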
Args: - document: Document object containing section to process - section_id: ID of the section to process + assessment_text: Raw text response from LLM + extraction_results: Original extraction results + class_schema: JSON Schema for the document class Returns: - Document: Updated Document object with assessment results appended to extraction results + Tuple of (enhanced_assessment_data, confidence_alerts, parsing_succeeded) """ - # Check if assessment is enabled in typed configuration - enabled = self.config.assessment.enabled - if not enabled: - logger.info("Assessment is disabled via configuration") - return document + # Parse response into JSON + assessment_data = {} + parsing_succeeded = True + + try: + assessment_data = json.loads(extract_json_from_text(assessment_text)) + except Exception as e: + logger.error( + f"Error parsing assessment LLM output - invalid JSON?: {assessment_text} - {e}" + ) + logger.info("Using default confidence scores.") + # Create default assessments for all extracted attributes + assessment_data = {} + for attr_name in extraction_results.keys(): + assessment_data[attr_name] = { + "confidence": 0.5, + "confidence_reason": "Unable to parse assessment response - default score assigned", + } + parsing_succeeded = False + + # Process bounding boxes automatically if bbox data is present + try: + logger.debug("Checking for bounding box data in assessment response") + assessment_data = self._extract_geometry_from_assessment(assessment_data) + except Exception as e: + logger.warning(f"Failed to extract geometry data: {str(e)}") + + # Enhance assessment data with confidence thresholds and create alerts + enhanced_assessment_data, confidence_alerts = ( + self._enhance_and_check_thresholds(assessment_data, class_schema) + ) + + return enhanced_assessment_data, confidence_alerts, parsing_succeeded + + def _enhance_and_check_thresholds( + self, assessment_data: dict[str, Any], class_schema: dict[str, Any] + ) -> tuple[dict[str, Any], list[ConfidenceAlert]]: + """ + Enhance assessment data with thresholds and generate alerts. 
+ + Args: + assessment_data: Raw assessment data from LLM + class_schema: JSON Schema for the document class + + Returns: + Tuple of (enhanced_assessment_data, confidence_alerts) + """ + default_confidence_threshold = ( + self.config.assessment.default_confidence_threshold + ) + + enhanced_assessment_data = {} + confidence_threshold_alerts: list[ConfidenceAlert] = [] + properties = class_schema.get(SCHEMA_PROPERTIES, {}) + + for attr_name, attr_assessment in assessment_data.items(): + prop_schema = properties.get(attr_name, {}) + attr_threshold = self._get_confidence_threshold( + prop_schema, default_confidence_threshold + ) + + # Get property type + prop_type_json = prop_schema.get(SCHEMA_TYPE, TYPE_STRING) + if prop_type_json == TYPE_OBJECT: + attr_type = "group" + elif prop_type_json == TYPE_ARRAY: + attr_type = "list" + else: + attr_type = "simple" + + # Process based on type + if isinstance(attr_assessment, dict): + enhanced_assessment_data[attr_name] = self._enhance_dict_assessment( + attr_assessment, attr_threshold + ) + self._check_confidence_alerts( + attr_assessment, + attr_name, + attr_threshold, + confidence_threshold_alerts, + ) + + elif isinstance(attr_assessment, list) and attr_type == "list": + enhanced_list = [] + for i, item_assessment in enumerate(attr_assessment): + if isinstance(item_assessment, dict): + enhanced_item = self._enhance_dict_assessment( + item_assessment, attr_threshold + ) + enhanced_list.append(enhanced_item) + self._check_confidence_alerts( + item_assessment, + f"{attr_name}[{i}]", + attr_threshold, + confidence_threshold_alerts, + ) + else: + # Unexpected format within list + logger.warning( + f"List item {i} in attribute '{attr_name}' is not a dictionary. Using default confidence." + ) + default_item = { + "confidence": 0.5, + "confidence_reason": f"List item {i} in '{attr_name}' has unexpected format.", + "confidence_threshold": attr_threshold, + } + enhanced_list.append(default_item) + + if 0.5 < attr_threshold: + alert = ConfidenceAlert( + attribute_name=f"{attr_name}[{i}]", + confidence=0.5, + confidence_threshold=attr_threshold, + ) + confidence_threshold_alerts.append(alert) + + enhanced_assessment_data[attr_name] = enhanced_list + + else: + # Unexpected type - use default + logger.warning( + f"Attribute '{attr_name}' has unexpected assessment format. Using default confidence." + ) + default_assessment = { + "confidence": 0.5, + "confidence_reason": f"LLM returned unexpected format for '{attr_name}'.", + "confidence_threshold": attr_threshold, + } + enhanced_assessment_data[attr_name] = default_assessment + + return enhanced_assessment_data, confidence_threshold_alerts + + # ============================================================================ + # ASSESSMENT EXECUTION + # ============================================================================ + + def _execute_bedrock_assessment( + self, content: list[dict[str, Any]] + ) -> tuple[str, dict[str, Any], float]: + """ + Execute Bedrock model invocation for assessment. 
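# --- Illustrative sketch, not part of the patch: the effect of
# _enhance_and_check_thresholds above on a single simple attribute. The
# per-attribute threshold comes from the property schema's
# X_AWS_IDP_CONFIDENCE_THRESHOLD entry (falling back to
# default_confidence_threshold, 0.8 in the sample configs), it is stamped
# onto the assessment entry, and a ConfidenceAlert is recorded whenever
# confidence < confidence_threshold. The field name is hypothetical;
# `service` is an AssessmentService instance and `class_schema` is the
# class's JSON Schema.
raw = {"AccountNumber": {"confidence": 0.62, "confidence_reason": "Partially legible"}}
enhanced, alerts = service._enhance_and_check_thresholds(raw, class_schema)
# enhanced["AccountNumber"]["confidence_threshold"] == 0.8
# alerts == [ConfidenceAlert(attribute_name="AccountNumber",
#                            confidence=0.62, confidence_threshold=0.8)]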
+ + Args: + content: Formatted content for the model - # Validate input document + Returns: + Tuple of (assessment_text, metering_data, duration_seconds) + """ + # Get assessment configuration + model_id = self.config.assessment.model + if not model_id: + raise ValueError("Assessment model_id is required but not configured") + + request_start_time = time.time() + + # Invoke Bedrock + response_with_metering = bedrock.invoke_model( + model_id=model_id, + system_prompt=self.config.assessment.system_prompt, + content=content, + temperature=self.config.assessment.temperature, + top_k=self.config.assessment.top_k, + top_p=self.config.assessment.top_p, + max_tokens=self.config.assessment.max_tokens, + context="Assessment", + ) + + total_duration = time.time() - request_start_time + + # Extract text from response + assessment_text = bedrock.extract_text_from_response(response_with_metering) + metering = response_with_metering.get("metering", {}) + + return assessment_text, metering, total_duration + + # ============================================================================ + # VALIDATION & HELPERS + # ============================================================================ + + def _validate_and_get_section(self, document: Document, section_id: str): + """ + Validate document and return the section to process. + + Args: + document: Document object to validate + section_id: ID of section to retrieve + + Returns: + Section object + + Raises: + ValueError: If validation fails + """ if not document: - logger.error("No document provided") - return document + raise ValueError("No document provided") if not document.sections: - logger.error("Document has no sections to process") - document.errors.append("Document has no sections to process") - return document + raise ValueError("Document has no sections to process") # Find the section with the given ID section = None @@ -701,28 +995,48 @@ def process_document_section(self, document: Document, section_id: str) -> Docum break if not section: - error_msg = f"Section {section_id} not found in document" - logger.error(error_msg) - document.errors.append(error_msg) - return document + raise ValueError(f"Section {section_id} not found in document") - # Check if section has extraction results to assess if not section.extraction_result_uri: - error_msg = f"Section {section_id} has no extraction results to assess" - logger.error(error_msg) - document.errors.append(error_msg) - return document - - # Extract information about the section - class_label = section.classification + raise ValueError( + f"Section {section_id} has no extraction results to assess" + ) - # Check if the section has required pages if not section.page_ids: - error_msg = f"Section {section_id} has no page IDs" - logger.error(error_msg) - document.errors.append(error_msg) + raise ValueError(f"Section {section_id} has no page IDs") + + return section + + # ============================================================================ + # PUBLIC API + # ============================================================================ + + def process_document_section(self, document: Document, section_id: str) -> Document: + """ + Process a single section from a Document object to assess extraction confidence. 
+ + Args: + document: Document object containing section to process + section_id: ID of the section to process + + Returns: + Document: Updated Document object with assessment results appended to extraction results + """ + # Check if assessment is enabled + if not self.config.assessment.enabled: + logger.info("Assessment is disabled via configuration") return document + # Validate and get section + try: + section = self._validate_and_get_section(document, section_id) + except ValueError as e: + logger.error(str(e)) + document.errors.append(str(e)) + return document + + class_label = section.classification + # Sort pages by page number sorted_page_ids = sorted(section.page_ids, key=int) start_page = int(sorted_page_ids[0]) @@ -736,81 +1050,17 @@ def process_document_section(self, document: Document, section_id: str) -> Docum metrics.put_metric("InputDocumentPagesForAssessment", len(section.page_ids)) try: - # Read existing extraction results + # Load extraction data t0 = time.time() - extraction_data = s3.get_json_content(section.extraction_result_uri) - extraction_results = extraction_data.get("inference_result", {}) - - # Skip assessment if no extraction results found - if not extraction_results: - logger.warning(f"No extraction results found for section {section_id}") - return document - + extraction_data_model = self._load_extraction_data(section) + extraction_results = extraction_data_model.extraction_results t1 = time.time() - logger.info(f"Time taken to read extraction results: {t1 - t0:.2f} seconds") - - # Read document text from all pages in order - document_texts = [] - for page_id in sorted_page_ids: - if page_id not in document.pages: - error_msg = f"Page {page_id} not found in document" - logger.error(error_msg) - document.errors.append(error_msg) - continue - - page = document.pages[page_id] - text_path = page.parsed_text_uri - page_text = s3.get_text_content(text_path) - document_texts.append(page_text) + logger.info(f"Time taken to load extraction data: {t1 - t0:.2f} seconds") - document_text = "\n".join(document_texts) + # Load document content (text, images, OCR confidence) + document_content = self._load_document_content(document, section) t2 = time.time() - logger.info(f"Time taken to read text content: {t2 - t1:.2f} seconds") - - # Read page images with configurable dimensions (type-safe access) - target_width = self.config.assessment.image.target_width - target_height = self.config.assessment.image.target_height - - page_images = [] - for page_id in sorted_page_ids: - if page_id not in document.pages: - continue - - page = document.pages[page_id] - image_uri = page.image_uri - # Just pass the values directly - prepare_image handles empty strings/None - image_content = image.prepare_image( - image_uri, target_width, target_height - ) - page_images.append(image_content) - - t3 = time.time() - logger.info(f"Time taken to read images: {t3 - t2:.2f} seconds") - - # Read text confidence data for confidence information - ocr_text_confidence = "" - for page_id in sorted_page_ids: - if page_id not in document.pages: - continue - - page = document.pages[page_id] - text_confidence_data_str = self._get_text_confidence_data(page) - if text_confidence_data_str: - ocr_text_confidence += ( - f"\n--- Page {page_id} Text Confidence Data ---\n" - ) - ocr_text_confidence += text_confidence_data_str - - t4 = time.time() - logger.info(f"Time taken to read raw OCR results: {t4 - t3:.2f} seconds") - - # Get assessment configuration (type-safe access, Pydantic handles conversions) - model_id = 
self.config.assessment.model - temperature = self.config.assessment.temperature - top_k = self.config.assessment.top_k - top_p = self.config.assessment.top_p - max_tokens = self.config.assessment.max_tokens - system_prompt = self.config.assessment.system_prompt + logger.info(f"Time taken to load document content: {t2 - t1:.2f} seconds") # Get schema for this document class class_schema = self._get_class_schema(class_label) @@ -832,12 +1082,12 @@ def process_document_section(self, document: Document, section_id: str) -> Docum try: content = self._build_content_with_or_without_image_placeholder( prompt_template, - document_text, + document_content.document_text, class_label, property_descriptions, extraction_results_str, - ocr_text_confidence, - page_images, # Pass images to the content builder + document_content.ocr_text_confidence, + document_content.page_images, ) except ValueError as e: logger.error(f"Error formatting prompt template: {str(e)}") @@ -849,203 +1099,53 @@ def process_document_section(self, document: Document, section_id: str) -> Docum f"Assessing extraction confidence for {class_label} document, section {section_id}" ) - # Time the model invocation - request_start_time = time.time() - - # Invoke Bedrock with the common library - response_with_metering = bedrock.invoke_model( - model_id=model_id, - system_prompt=system_prompt, - content=content, - temperature=temperature, - top_k=top_k, - top_p=top_p, - max_tokens=max_tokens, - context="Assessment", + # Execute Bedrock assessment + assessment_text, metering, total_duration = ( + self._execute_bedrock_assessment(content) ) - - total_duration = time.time() - request_start_time logger.info(f"Time taken for assessment: {total_duration:.2f} seconds") - # Extract text from response - assessment_text = bedrock.extract_text_from_response(response_with_metering) - metering = response_with_metering.get("metering", {}) - - # Parse response into JSON - assessment_data = {} - parsing_succeeded = True # Flag to track if parsing was successful - - try: - # Try to parse the assessment text as JSON - assessment_data = json.loads(extract_json_from_text(assessment_text)) - except Exception as e: - # Handle parsing error - logger.error( - f"Error parsing assessment LLM output - invalid JSON?: {assessment_text} - {e}" - ) - logger.info("Using default confidence scores.") - # Create default assessments for all extracted attributes - assessment_data = {} - for attr_name in extraction_results.keys(): - assessment_data[attr_name] = { - "confidence": 0.5, - "confidence_reason": "Unable to parse assessment response - default score assigned", - } - parsing_succeeded = False # Mark that parsing failed - - # Process bounding boxes automatically if bbox data is present - try: - logger.debug("Checking for bounding box data in assessment response") - assessment_data = self._extract_geometry_from_assessment( - assessment_data - ) - except Exception as e: - logger.warning(f"Failed to extract geometry data: {str(e)}") - # Continue with assessment even if geometry extraction fails - - # Get confidence thresholds (type-safe, already float from Pydantic) - default_confidence_threshold = ( - self.config.assessment.default_confidence_threshold + # Process assessment response + ( + enhanced_assessment_data, + confidence_threshold_alerts, + parsing_succeeded, + ) = self._process_assessment_response( + assessment_text, extraction_results, class_schema ) - # Enhance assessment data with confidence thresholds and create confidence threshold alerts - 
enhanced_assessment_data = {} - confidence_threshold_alerts = [] - - # Get properties dict once for efficient access - properties = class_schema.get(SCHEMA_PROPERTIES, {}) - - for attr_name, attr_assessment in assessment_data.items(): - # Get property schema (if it exists in schema) - prop_schema = properties.get(attr_name, {}) - - # Get threshold for this property - attr_threshold = _safe_float_conversion( - prop_schema.get( - X_AWS_IDP_CONFIDENCE_THRESHOLD, default_confidence_threshold - ), - default_confidence_threshold, - ) - - # Get property type - prop_type_json = prop_schema.get(SCHEMA_TYPE, TYPE_STRING) - - # Map JSON Schema type to legacy attribute type for existing logic - if prop_type_json == TYPE_OBJECT: - attr_type = "group" - elif prop_type_json == TYPE_ARRAY: - attr_type = "list" - else: - attr_type = "simple" - - # Check if attr_assessment is a dictionary (expected format for simple/group attributes) - if isinstance(attr_assessment, dict): - # For simple attributes or group attributes - add confidence_threshold to each confidence assessment - enhanced_assessment_data[attr_name] = self._enhance_dict_assessment( - attr_assessment, attr_threshold - ) - - # Check for confidence threshold alerts in the assessment - self._check_confidence_alerts( - attr_assessment, - attr_name, - attr_threshold, - confidence_threshold_alerts, - ) - - elif isinstance(attr_assessment, list): - # Handle list attributes (expected format for LIST attributes like transactions) - if attr_type == "list": - # This is expected for list attributes - process each item in the list - enhanced_list = [] - for i, item_assessment in enumerate(attr_assessment): - if isinstance(item_assessment, dict): - enhanced_item = self._enhance_dict_assessment( - item_assessment, attr_threshold - ) - enhanced_list.append(enhanced_item) - - # Check for confidence threshold alerts in list items - self._check_confidence_alerts( - item_assessment, - f"{attr_name}[{i}]", - attr_threshold, - confidence_threshold_alerts, - ) - else: - # Handle unexpected format within list - logger.warning( - f"List item {i} in attribute '{attr_name}' is not a dictionary. " - f"Expected dict, got {type(item_assessment)}. Using default confidence." - ) - default_item = { - "confidence": 0.5, - "confidence_reason": f"List item {i} in '{attr_name}' has unexpected format. Using default confidence.", - "confidence_threshold": attr_threshold, - } - enhanced_list.append(default_item) - - # Add alert for default confidence - if 0.5 < attr_threshold: - confidence_threshold_alerts.append( - { - "attribute_name": f"{attr_name}[{i}]", - "confidence": 0.5, - "confidence_threshold": attr_threshold, - } - ) - - enhanced_assessment_data[attr_name] = enhanced_list - else: - # List format for non-list attribute is unexpected - logger.warning( - f"Attribute '{attr_name}' (type: {attr_type}) assessment is a list but attribute is not configured as list type. " - f"Using default confidence." - ) - - # Create a default assessment structure - default_assessment = { - "confidence": 0.5, - "confidence_reason": f"LLM returned list format for non-list attribute '{attr_name}'. Using default confidence (0.5) and threshold ({attr_threshold}).", - "confidence_threshold": attr_threshold, - } - enhanced_assessment_data[attr_name] = default_assessment - - else: - # Handle other unexpected types - logger.warning( - f"Attribute '{attr_name}' assessment is of unexpected type {type(attr_assessment)}. " - f"Expected dictionary or list (for list attributes). Using default confidence." 
- ) - - # Create a default assessment structure - default_assessment = { - "confidence": 0.5, - "confidence_reason": f"LLM returned unexpected type {type(attr_assessment)} for attribute '{attr_name}'. Using default confidence (0.5) and threshold ({attr_threshold}).", - "confidence_threshold": attr_threshold, - } - enhanced_assessment_data[attr_name] = default_assessment - # Update the existing extraction result with enhanced assessment data - extraction_data["explainability_info"] = [enhanced_assessment_data] - extraction_data["metadata"] = extraction_data.get("metadata", {}) - extraction_data["metadata"]["assessment_time_seconds"] = total_duration - extraction_data["metadata"]["assessment_parsing_succeeded"] = ( - parsing_succeeded + extraction_data_model.full_data["explainability_info"] = [ + enhanced_assessment_data + ] + extraction_data_model.full_data["metadata"] = ( + extraction_data_model.full_data.get("metadata", {}) + ) + extraction_data_model.full_data["metadata"]["assessment_time_seconds"] = ( + total_duration ) + extraction_data_model.full_data["metadata"][ + "assessment_parsing_succeeded" + ] = parsing_succeeded # Write the updated result back to S3 + # extraction_result_uri is guaranteed to exist by _validate_and_get_section + assert section.extraction_result_uri is not None bucket, key = utils.parse_s3_uri(section.extraction_result_uri) s3.write_content( - extraction_data, bucket, key, content_type="application/json" + extraction_data_model.full_data, + bucket, + key, + content_type="application/json", ) # Update the section in the document with confidence threshold alerts for doc_section in document.sections: if doc_section.section_id == section_id: - doc_section.confidence_threshold_alerts = ( - confidence_threshold_alerts - ) + # Convert ConfidenceAlert objects to dicts + doc_section.confidence_threshold_alerts = [ + alert.model_dump() for alert in confidence_threshold_alerts + ] break # Update document with metering data diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index 04da4fc2..f4963261 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -11,18 +11,17 @@ from typing import Any from aws_lambda_powertools import Logger -from botocore.config import Config from strands import Agent from strands.agent.conversation_manager import SummarizingConversationManager from strands.models.bedrock import BedrockModel -from strands.types.content import ContentBlock, Message +from strands.types.content import CachePoint, ContentBlock, Message +from strands.types.media import ImageContent, ImageSource from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_models import AssessmentOutput from idp_common.assessment.strands_tools import create_strands_tools -from idp_common.utils.bedrock_utils import ( - async_exponential_backoff_retry, -) +from idp_common.bedrock import build_model_config +from idp_common.utils.bedrock_utils import async_exponential_backoff_retry logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) @@ -63,168 +62,193 @@ async def assess_attribute_with_strands( """ start_time = time.time() - try: - # 1. Create tools (image viewer + todo list + submit assessment) - base_tools = create_strands_tools(page_images, sorted_page_ids) - tools = base_tools - # 2. 
Build enhanced system prompt with schema and extraction (for caching) - enhanced_system_prompt = _build_system_prompt_with_context( - system_prompt, document_schema, extraction_results, len(page_images) - ) + # 1. Create tools (image viewer + todo list + submit assessment) + tools = create_strands_tools(page_images, sorted_page_ids) - # 3. Build minimal task-specific prompt (just field path and threshold) - task_prompt = _build_task_prompt(task) - - # 4. Create Bedrock model config (following agentic_idp.py pattern) - boto_config = Config( - retries={ - "max_attempts": max_retries, - "mode": "adaptive", - }, - connect_timeout=connect_timeout, - read_timeout=read_timeout, - ) + # 2. Build enhanced system prompt with schema and extraction (for caching) + enhanced_system_prompt = _build_system_prompt_with_context( + system_prompt, document_schema, extraction_results, len(page_images) + ) - model_config = { - "model_id": model_id, - "temperature": temperature, - "max_tokens": max_tokens, - "boto_client_config": boto_config, - } + # 3. Build minimal task-specific prompt (just field path and threshold) + task_prompt = _build_task_prompt(task, page_images) - # 5. Initialize Strands agent - agent = Agent( - model=BedrockModel(**model_config), - tools=tools, - system_prompt=enhanced_system_prompt, - state={ - "task": task.model_dump(), - "extraction_results": extraction_results, - "assessment_output": None, - }, - conversation_manager=SummarizingConversationManager( - summary_ratio=0.8, preserve_recent_messages=1 - ), - ) + # 4. Create Bedrock model config using shared utility + model_config = build_model_config( + model_id=model_id, + max_tokens=max_tokens, + max_retries=max_retries, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + # Add temperature (not handled by build_model_config) + model_config["temperature"] = temperature + + # 5. Initialize Strands agent + agent = Agent( + model=BedrockModel(**model_config), + tools=tools, + system_prompt=enhanced_system_prompt, + state={ + "task": task.model_dump(), + "extraction_results": extraction_results, + "assessment_output": None, + }, + conversation_manager=SummarizingConversationManager( + summary_ratio=0.8, preserve_recent_messages=1 + ), + ) - # 5. Create user message with task prompt - user_message = Message(role="user", content=[ContentBlock(text=task_prompt)]) - - # 6. Run agent - logger.info( - "Starting Strands assessment", - extra={ - "task_id": task.task_id, - "task_type": task.task_type, - "field_name": task.field_name, - }, - ) + # 6. Create user message and run agent with retry + user_message = Message(role="user", content=task_prompt) - @async_exponential_backoff_retry( - max_retries=30, - initial_delay=5, - exponential_base=2, - jitter=0.5, - max_delay=900, - ) - async def invoke_agent_with_retry(): - return await agent.invoke_async([user_message]) + logger.info( + "Starting Strands assessment", + extra={ + "task_id": task.task_id, + "task_type": task.task_type, + "field_name": task.field_name, + }, + ) + @async_exponential_backoff_retry( + max_retries=30, + initial_delay=5, + exponential_base=2, + jitter=0.5, + max_delay=900, + ) + async def invoke_agent_with_retry(): + return await agent.invoke_async([user_message]) + + try: response = await invoke_agent_with_retry() logger.debug("Agent response received", extra={"task_id": task.task_id}) - - # 7. 
Extract assessment from agent state - assessment_dict = agent.state.get("assessment_output") - if not assessment_dict: - raise ValueError( - "Agent did not produce assessment output. Agent may not have called submit_assessment tool." - ) - - # Validate to Pydantic model - assessment_output = AssessmentOutput(**assessment_dict) - - # Validate that agent assessed the expected field - # The agent may return: - # - Just the field name: "Street" - # - Full path with dots: "VendorAddress.Street" - # - Full path with array indices: "Items[0].Description" - # We accept any of these as long as the expected field_name appears - expected_field = task.field_name - assessed_field = assessment_output.field_name - - # Check if fields match: - # 1. Exact match - # 2. Expected field is at the end after a dot: "VendorAddress.Street" ends with ".Street" - # 3. Expected field is at the end after bracket: "Items[0].Description" ends with ".Description" - if not ( - assessed_field == expected_field - or assessed_field.endswith(f".{expected_field}") - or assessed_field.endswith(f"]{expected_field}") - or f".{expected_field}" in assessed_field - or f"]{expected_field}" in assessed_field - ): - raise ValueError( - f"Agent assessed wrong field: expected '{expected_field}', " - f"got '{assessed_field}'" - ) - - # 8. Extract metering from response - metering = {} - if response.metrics and response.metrics.accumulated_usage: - token_usage = { - "inputTokens": response.metrics.accumulated_usage.get("inputTokens", 0), - "outputTokens": response.metrics.accumulated_usage.get( - "outputTokens", 0 - ), - "totalTokens": response.metrics.accumulated_usage.get("totalTokens", 0), - "cacheReadInputTokens": response.metrics.accumulated_usage.get( - "cacheReadInputTokens", 0 - ), - "cacheWriteInputTokens": response.metrics.accumulated_usage.get( - "cacheWriteInputTokens", 0 - ), - } - metering[f"assessment/bedrock/{model_id}"] = token_usage - - # 9. Convert to AssessmentResult format - result = _convert_to_assessment_result( - task, - assessment_output, - metering, - time.time() - start_time, + except Exception as e: + logger.error( + "Agent invocation failed", + extra={"task_id": task.task_id, "error": str(e)}, ) - - logger.info( - "Assessment completed successfully", - extra={ - "task_id": task.task_id, - "processing_time": result.processing_time, - "success": result.success, - }, + return AssessmentResult( + task_id=task.task_id, + success=False, + assessment_data={}, + confidence_alerts=[], + error_message=f"Agent invocation failed: {str(e)}", + processing_time=time.time() - start_time, ) - return result + # 7. Extract and validate assessment from agent state + assessment_dict = agent.state.get("assessment_output") + if not assessment_dict: + return AssessmentResult( + task_id=task.task_id, + success=False, + assessment_data={}, + confidence_alerts=[], + error_message="Agent did not produce assessment output. 
Agent may not have called submit_assessment tool.", + processing_time=time.time() - start_time, + ) + try: + assessment_output = AssessmentOutput(**assessment_dict) except Exception as e: - # Return failed result - logger.error( - "Assessment failed", - extra={ - "task_id": task.task_id, - "error": str(e), - "processing_time": time.time() - start_time, - }, + return AssessmentResult( + task_id=task.task_id, + success=False, + assessment_data={}, + confidence_alerts=[], + error_message=f"Invalid assessment output format: {str(e)}", + processing_time=time.time() - start_time, ) + # Validate that agent assessed the expected field + if not _field_names_match(task.field_name, assessment_output.field_name): return AssessmentResult( task_id=task.task_id, success=False, assessment_data={}, confidence_alerts=[], - error_message=str(e), + error_message=f"Agent assessed wrong field: expected '{task.field_name}', got '{assessment_output.field_name}'", processing_time=time.time() - start_time, ) + # 8. Extract metering from response + metering = _extract_metering(response, model_id) + + # 9. Convert to AssessmentResult format + result = _convert_to_assessment_result( + task, + assessment_output, + metering, + time.time() - start_time, + ) + + logger.info( + "Assessment completed successfully", + extra={ + "task_id": task.task_id, + "processing_time": result.processing_time, + "success": result.success, + }, + ) + + return result + + +def _field_names_match(expected: str, actual: str) -> bool: + """ + Check if field names match, handling nested paths with dots and array indices. + + Examples: + - "address.street" matches "address.street" + - "items[0].price" matches "items[0].price" + - "address" matches "address" + + Args: + expected: Expected field name/path + actual: Actual field name/path from agent + + Returns: + True if field names match + """ + return expected == actual + + +def _extract_metering(response: Any, model_id: str) -> dict[str, Any]: + """ + Extract metering data from Strands AgentResult response. + + Args: + response: AgentResult from agent.invoke_async() (has metrics attribute) + model_id: Model ID for metering key + + Returns: + Metering dict with token usage, or empty dict if no metrics + """ + metering = {} + # AgentResult has metrics attribute at runtime (from Strands) + if ( + hasattr(response, "metrics") + and response.metrics + and hasattr(response.metrics, "accumulated_usage") + and response.metrics.accumulated_usage + ): # type: ignore[attr-defined] + token_usage = { + "inputTokens": response.metrics.accumulated_usage.get("inputTokens", 0), # type: ignore[attr-defined] + "outputTokens": response.metrics.accumulated_usage.get("outputTokens", 0), # type: ignore[attr-defined] + "totalTokens": response.metrics.accumulated_usage.get("totalTokens", 0), # type: ignore[attr-defined] + "cacheReadInputTokens": response.metrics.accumulated_usage.get( # type: ignore[attr-defined] + "cacheReadInputTokens", 0 + ), + "cacheWriteInputTokens": response.metrics.accumulated_usage.get( # type: ignore[attr-defined] + "cacheWriteInputTokens", 0 + ), + } + metering[f"assessment/bedrock/{model_id}"] = token_usage + + return metering + def _build_system_prompt_with_context( base_system_prompt: str, @@ -299,41 +323,76 @@ def _build_system_prompt_with_context( """ -def _build_task_prompt(task: AssessmentTask) -> str: +def _convert_field_path_to_string(field_path: tuple[str | int, ...]) -> str: """ - Build minimal task-specific prompt for assessing a single field. 
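# --- Illustrative sketch, not part of the patch: the same retry wrapper
# used around agent.invoke_async above can guard any throttling-prone
# coroutine. The parameter names suggest delays grow exponentially from
# initial_delay (scaled by exponential_base, plus jitter) up to max_delay;
# fetch() below is a hypothetical coroutine.
from idp_common.utils.bedrock_utils import async_exponential_backoff_retry

@async_exponential_backoff_retry(
    max_retries=5,
    initial_delay=1,
    exponential_base=2,
    jitter=0.5,
    max_delay=30,
)
async def fetch():
    ...  # any Bedrock call that may raise a retryable throttling error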
+ Convert field path tuple to dot notation string. - This is minimal (just field path and threshold) to maximize the benefit - of caching the system prompt which contains the schema and extraction. + Examples: + ("address", "street") → "address.street" + ("items", 0, "price") → "items[0].price" + ("orders", 2, "line_items", 1, "quantity") → "orders[2].line_items[1].quantity" Args: - task: Assessment task for one specific field + field_path: Tuple of field names (str) and array indices (int) Returns: - Minimal task prompt string + Dot notation path string with array indices in brackets """ - # Convert field_path tuple to string representation - # e.g., ("address", "street") -> "address.street" - # e.g., ("items", 0, "price") -> "items[0].price" path_parts = [] - for part in task.field_path: + for part in field_path: if isinstance(part, int): + # Append array index to previous part: "items" → "items[0]" path_parts[-1] = f"{path_parts[-1]}[{part}]" else: + # Add new field name path_parts.append(str(part)) - field_path_str = ".".join(path_parts) - return f"""# Assessment Task + return ".".join(path_parts) + -Assess the confidence of this field: +def _build_task_prompt( + task: AssessmentTask, page_images: list[bytes] +) -> list[ContentBlock]: + """ + Build minimal task-specific prompt for assessing a single field. -**Field Path**: `{field_path_str}` -**Confidence Threshold**: {task.confidence_threshold} + This is minimal (just field path and threshold) to maximize the benefit + of caching the system prompt which contains the schema and extraction. -Locate the value for `{field_path_str}` in the extraction results provided in the system context, verify it against the document images, and submit your assessment. + Args: + task: Assessment task for one specific field + page_images: List of page images to include in the prompt -You MUST assess ONLY this field - do not assess any other fields. -""" + Returns: + List of content blocks with images and task text + """ + field_path_str = _convert_field_path_to_string(task.field_path) + + # Create image content blocks + image_blocks = [ + ContentBlock(image=ImageContent(format="png", source=ImageSource(bytes=img))) + for img in page_images + ] + + # Create task instruction block + task_block = ContentBlock( + text=f"""# Assessment Task + + Assess the confidence of this field: + + **Field Path**: `{field_path_str}` + **Confidence Threshold**: {task.confidence_threshold} + + Locate the value for `{field_path_str}` in the extraction results provided in the system context, verify it against the document images, and submit your assessment. + + You MUST assess ONLY this field - do not assess any other fields. 
+ """ + ) + + # Add cache point after task instructions + cache_block = ContentBlock(cachePoint=CachePoint(type="default")) + + return [*image_blocks, task_block, cache_block] def _convert_to_assessment_result( diff --git a/lib/idp_common_pkg/idp_common/bedrock/__init__.py b/lib/idp_common_pkg/idp_common/bedrock/__init__.py index a2672973..c93d637a 100644 --- a/lib/idp_common_pkg/idp_common/bedrock/__init__.py +++ b/lib/idp_common_pkg/idp_common/bedrock/__init__.py @@ -3,7 +3,13 @@ """Bedrock integration module for IDP Common package.""" -from .client import BedrockClient, invoke_model, default_client +from .client import BedrockClient, default_client, invoke_model +from .model_config import ( + build_model_config, + get_model_max_tokens, + supports_prompt_caching, + supports_tool_caching, +) # Add version info __version__ = "0.1.0" @@ -12,10 +18,14 @@ __all__ = [ "BedrockClient", "invoke_model", - "default_client" + "default_client", + "build_model_config", + "get_model_max_tokens", + "supports_prompt_caching", + "supports_tool_caching", ] # Re-export key functions from the default client for backward compatibility extract_text_from_response = default_client.extract_text_from_response generate_embedding = default_client.generate_embedding -format_prompt = default_client.format_prompt \ No newline at end of file +format_prompt = default_client.format_prompt diff --git a/lib/idp_common_pkg/idp_common/bedrock/model_config.py b/lib/idp_common_pkg/idp_common/bedrock/model_config.py new file mode 100644 index 00000000..0df2da00 --- /dev/null +++ b/lib/idp_common_pkg/idp_common/bedrock/model_config.py @@ -0,0 +1,178 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Bedrock model configuration utilities. + +This module provides utilities for building model configurations with +proper token limits, caching settings, and retry behavior. +""" + +import logging +import re +from typing import Any + +from botocore.config import Config + +from idp_common.bedrock.client import CACHEPOINT_SUPPORTED_MODELS + +logger = logging.getLogger(__name__) + + +def supports_prompt_caching(model_id: str) -> bool: + """ + Check if a model supports prompt caching (cachePoint in system prompt). + + Args: + model_id: The Bedrock model identifier + + Returns: + True if the model supports prompt caching, False otherwise + """ + return model_id in CACHEPOINT_SUPPORTED_MODELS + + +def supports_tool_caching(model_id: str) -> bool: + """ + Check if a model supports tool caching (cachePoint in toolConfig). + + Note: Only Claude models support tool caching. Nova models support + prompt caching but NOT tool caching. + + Args: + model_id: The Bedrock model identifier + + Returns: + True if the model supports tool caching, False otherwise + """ + return "anthropic.claude" in model_id or "us.anthropic.claude" in model_id + + +def get_model_max_tokens(model_id: str) -> int: + """ + Get the maximum output tokens supported by a model. 
+ + Args: + model_id: The Bedrock model identifier + + Returns: + Maximum output tokens for the model + """ + model_id_lower = model_id.lower() + + # Check Claude 4 patterns first (more specific) + if re.search(r"claude-(opus|sonnet|haiku)-4", model_id_lower): + return 64_000 + + # Check Nova models + if any( + nova in model_id_lower + for nova in ["nova-premier", "nova-pro", "nova-lite", "nova-micro"] + ): + return 10_000 + + # Check Claude 3 models + if "claude-3" in model_id_lower: + return 8_192 + + # Default fallback + return 4_096 + + +def build_model_config( + model_id: str, + max_tokens: int | None = None, + max_retries: int = 3, + connect_timeout: float = 60.0, + read_timeout: float = 300.0, +) -> dict[str, Any]: + """ + Build model configuration with token limits and caching settings. + + This function: + 1. Creates boto3 Config with retry and timeout settings + 2. Determines model-specific max token limits + 3. Validates and caps max_tokens if needed + 4. Auto-detects and enables caching support (prompt and tool caching) + + Args: + model_id: Bedrock model identifier (supports us.*, eu.*, and global.anthropic.*) + max_tokens: Optional max tokens override (will be capped at model max) + max_retries: Maximum retry attempts for API calls (default: 3) + connect_timeout: Connection timeout in seconds (default: 60.0) + read_timeout: Read timeout in seconds (default: 300.0) + + Returns: + Dictionary of model configuration parameters. + Automatically uses BedrockModel for regional models (us.*, eu.*) and + AnthropicModel with AnthropicBedrock for cross-region models (global.anthropic.*). + """ + # Configure retry behavior and timeouts using boto3 Config + boto_config = Config( + retries={ + "max_attempts": max_retries, + "mode": "adaptive", # Uses exponential backoff with adaptive retry mode + }, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + + # Get model-specific maximum token limits + model_max = get_model_max_tokens(model_id) + + # Use config value if provided, but cap at model's maximum + if max_tokens is not None: + if max_tokens > model_max: + logger.warning( + "Config max_tokens exceeds model limit, capping at model maximum", + extra={ + "config_max_tokens": max_tokens, + "model_max_tokens": model_max, + "model_id": model_id, + }, + ) + max_output_tokens = model_max + else: + max_output_tokens = max_tokens + else: + # No config value - use model maximum + max_output_tokens = model_max + + # Build base model config + model_config = dict( + model_id=model_id, boto_client_config=boto_config, max_tokens=max_output_tokens + ) + + logger.info( + "Setting max_tokens for model", + extra={ + "max_tokens": max_output_tokens, + "model_id": model_id, + "model_max_tokens": model_max, + }, + ) + + # Auto-detect caching support based on model capabilities + if supports_prompt_caching(model_id): + model_config["cache_prompt"] = "default" + logger.info( + "Prompt caching enabled for model", + extra={"model_id": model_id, "auto_detected": True}, + ) + + # Only enable tool caching if the model supports it (Claude only, not Nova) + if supports_tool_caching(model_id): + model_config["cache_tools"] = "default" + logger.info( + "Tool caching enabled for model", + extra={"model_id": model_id, "auto_detected": True}, + ) + else: + logger.info( + "Tool caching not supported for model", + extra={"model_id": model_id, "reason": "prompt_caching_only"}, + ) + else: + logger.debug("Caching not supported for model", extra={"model_id": model_id}) + + return model_config diff --git 
a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 04f97632..1e1c3126 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -278,34 +278,7 @@ class AssessmentConfig(BaseModel): - For bounding boxes, provide normalized coordinates (0.0 to 1.0) in the format: {"left": x1, "top": y1, "width": w, "height": h} - -{ATTRIBUTE_NAMES_AND_DESCRIPTIONS} - - -<> - - -{DOCUMENT_IMAGE} - - - -{OCR_TEXT_CONFIDENCE} - - -<> - - -{EXTRACTION_RESULTS} - - -Provide your assessment as a JSON object with this exact structure: -{ - "attribute_name": { - "confidence": 0.0 to 1.0, - "confidence_reason": "explanation", - "bounding_box": {"left": 0.0, "top": 0.0, "width": 0.0, "height": 0.0} - } -}""", +""", description="Task prompt template for assessment", ) temperature: float = Field(default=0.0, ge=0.0, le=1.0) diff --git a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py index f34ffc15..6154f185 100644 --- a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py +++ b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py @@ -11,7 +11,6 @@ import json import logging import os -import re import threading from pathlib import Path from typing import ( @@ -22,7 +21,6 @@ import jsonpatch from aws_lambda_powertools import Logger -from botocore.config import Config from PIL import Image from pydantic import BaseModel, Field from strands import Agent, tool @@ -36,7 +34,9 @@ ImageSource, ) -from idp_common.bedrock.client import CACHEPOINT_SUPPORTED_MODELS +from idp_common.bedrock import ( + build_model_config, +) from idp_common.config.models import IDPConfig from idp_common.utils.bedrock_utils import ( async_exponential_backoff_retry, @@ -86,35 +86,6 @@ def detect_image_format(image_bytes: bytes) -> str: TargetModel = TypeVar("TargetModel", bound=BaseModel) -def supports_tool_caching(model_id: str) -> bool: - """ - Check if a model supports tool caching (cachePoint in toolConfig). - - Note: Only Claude models support tool caching. Nova models support - prompt caching but NOT tool caching. - - Args: - model_id: The Bedrock model identifier - - Returns: - True if the model supports tool caching, False otherwise - """ - return "anthropic.claude" in model_id or "us.anthropic.claude" in model_id - - -def supports_prompt_caching(model_id: str) -> bool: - """ - Check if a model supports prompt caching (cachePoint in system prompt). 
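# --- Illustrative sketch, not part of the patch: the shared
# build_model_config helper that replaces the per-module copies removed
# here. It returns a plain dict (model_id, boto_client_config, max_tokens
# capped at the model's limit, plus cache_prompt / cache_tools when the
# model supports them) that callers splat into a Strands BedrockModel,
# as strands_service.py does above.
from idp_common.bedrock import build_model_config
from strands.models.bedrock import BedrockModel

cfg = build_model_config(
    model_id="us.anthropic.claude-haiku-4-5-20251001-v1:0",
    max_tokens=128_000,  # capped to the model's 64k maximum by the helper
    max_retries=3,
)
cfg["temperature"] = 0.0  # temperature is added by the caller, not the helper
model = BedrockModel(**cfg)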
- - Args: - model_id: The Bedrock model identifier - - Returns: - True if the model supports prompt caching, False otherwise - """ - return model_id in CACHEPOINT_SUPPORTED_MODELS - - class BedrockUsage(TypedDict, total=False): """Token usage information from Bedrock response.""" @@ -1055,7 +1026,7 @@ async def structured_output_async( ) # Build model configuration with token limits and caching - model_config = _build_model_config( + model_config = build_model_config( model_id=model_id, max_tokens=max_tokens, max_retries=max_retries, @@ -1145,7 +1116,7 @@ async def structured_output_async( ], ) # Build config for review agent - review_model_config = _build_model_config( + review_model_config = build_model_config( model_id=config.extraction.agentic.review_agent_model, max_tokens=max_tokens, max_retries=max_retries, diff --git a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index 62e1f082..eb12c58e 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -1116,8 +1116,8 @@ requires-dist = [ { name = "rich", marker = "extra == 'test'", specifier = ">=13.0.0" }, { name = "ruff", marker = "extra == 'test'", specifier = ">=0.14.0" }, { name = "s3fs", marker = "extra == 'criteria-validation'", specifier = "==2023.12.2" }, - { name = "stickler-eval", marker = "extra == 'all'", specifier = "==0.1.2" }, - { name = "stickler-eval", marker = "extra == 'evaluation'", specifier = "==0.1.2" }, + { name = "stickler-eval", marker = "extra == 'all'", specifier = "==0.1.3" }, + { name = "stickler-eval", marker = "extra == 'evaluation'", specifier = "==0.1.3" }, { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agentic-extraction'", specifier = "==1.17.0" }, { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'agents'", specifier = "==1.17.0" }, { name = "strands-agents", marker = "python_full_version >= '3.10' and extra == 'all'", specifier = "==1.17.0" }, @@ -3096,7 +3096,7 @@ wheels = [ [[package]] name = "stickler-eval" -version = "0.1.2" +version = "0.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonschema" }, @@ -3108,9 +3108,9 @@ dependencies = [ { name = "rapidfuzz" }, { name = "scipy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/7d/001bffe60e417315c470e77d7d0832f3fce9565f9f491450a3d3497df2df/stickler_eval-0.1.2.tar.gz", hash = "sha256:a9bded773c0c4d8ee08ba03b422ee4655c6a7292f572264f2cd74a565fdbc3f8", size = 132677, upload-time = "2025-11-06T20:07:41.704Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/0b/88054b339bc9c2f9147570710c41d2cf13e355df89894bb91ff3a93ff644/stickler_eval-0.1.3.tar.gz", hash = "sha256:d517a413f2e019a947e25bf0d87da6595ae908c0ff3b19e691af77589d1eb6a9", size = 141437, upload-time = "2025-11-18T19:49:29.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/cf/e37a0731e2a1e4c48b5dbf4dd7b1f8d421367ed9a8fb09a1f78ccdebaaaf/stickler_eval-0.1.2-py3-none-any.whl", hash = "sha256:1631bb40cf625e473c22e92158e927412b59bb1f267c06ba47750bac71cc8483", size = 161224, upload-time = "2025-11-06T20:07:40.556Z" }, + { url = "https://files.pythonhosted.org/packages/28/42/502852c1951f1297f4b751c68f4eec1d103468e6b3fc20c75d3120eeb8b7/stickler_eval-0.1.3-py3-none-any.whl", hash = "sha256:b1256f6fd4aa167e67bff14133fe7149e5e48f0f2f526d9174075df3a4333f57", size = 173279, upload-time = "2025-11-18T19:49:28.093Z" }, ] [[package]] From 076092fe96d649ebd48cb38ff081205e9a672d6a Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: 
Fri, 21 Nov 2025 16:14:59 +0000 Subject: [PATCH 11/30] fix template --- lib/idp_common_pkg/idp_common/assessment/models.py | 4 ++-- lib/idp_common_pkg/idp_common/assessment/service.py | 5 +++-- lib/idp_common_pkg/pyproject.toml | 1 + lib/idp_common_pkg/uv.lock | 2 ++ 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index 909a6ae3..07c8d2cd 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -110,9 +110,9 @@ class Geometry(BaseModel): class ConfidenceAlert(BaseModel): """Alert for confidence threshold violation.""" - attribute_name: str + attribute_name: str = Field(serialization_alias="attributeName") confidence: float - confidence_threshold: float + confidence_threshold: float = Field(serialization_alias="confidenceThreshold") @field_validator("confidence", "confidence_threshold", mode="before") @classmethod diff --git a/lib/idp_common_pkg/idp_common/assessment/service.py b/lib/idp_common_pkg/idp_common/assessment/service.py index 4ffe976a..5e7f0973 100644 --- a/lib/idp_common_pkg/idp_common/assessment/service.py +++ b/lib/idp_common_pkg/idp_common/assessment/service.py @@ -1142,9 +1142,10 @@ def process_document_section(self, document: Document, section_id: str) -> Docum # Update the section in the document with confidence threshold alerts for doc_section in document.sections: if doc_section.section_id == section_id: - # Convert ConfidenceAlert objects to dicts + # Convert ConfidenceAlert objects to dicts with camelCase keys for UI doc_section.confidence_threshold_alerts = [ - alert.model_dump() for alert in confidence_threshold_alerts + alert.model_dump(by_alias=True) + for alert in confidence_threshold_alerts ] break diff --git a/lib/idp_common_pkg/pyproject.toml b/lib/idp_common_pkg/pyproject.toml index 5d2dbd2a..34ff7683 100644 --- a/lib/idp_common_pkg/pyproject.toml +++ b/lib/idp_common_pkg/pyproject.toml @@ -26,6 +26,7 @@ dependencies = [ "pydantic>=2.12.0", "deepdiff>=6.0.0", "mypy-boto3-bedrock-runtime>=1.40.76", + "aws-lambda-powertools>=3.21.0", ] readme = "README.md" classifiers = [ diff --git a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index eb12c58e..26dfb208 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -921,6 +921,7 @@ name = "idp-common" version = "0.4.2" source = { editable = "." 
} dependencies = [ + { name = "aws-lambda-powertools" }, { name = "boto3" }, { name = "deepdiff" }, { name = "jsonschema" }, @@ -1048,6 +1049,7 @@ test = [ requires-dist = [ { name = "amazon-textract-textractor", extras = ["pandas"], marker = "extra == 'all'", specifier = "==1.9.2" }, { name = "amazon-textract-textractor", extras = ["pandas"], marker = "extra == 'ocr'", specifier = "==1.9.2" }, + { name = "aws-lambda-powertools", specifier = ">=3.21.0" }, { name = "aws-lambda-powertools", marker = "extra == 'agentic-extraction'", specifier = ">=3.2.0" }, { name = "aws-lambda-powertools", marker = "extra == 'assessment'", specifier = ">=3.2.0" }, { name = "aws-xray-sdk", marker = "extra == 'docs-service'", specifier = ">=2.14.0" }, From 839e5206295b7b117ad00a2f6587753b58edc2e8 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Sat, 22 Nov 2025 18:27:56 +0000 Subject: [PATCH 12/30] missing dep --- lib/idp_common_pkg/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/idp_common_pkg/pyproject.toml b/lib/idp_common_pkg/pyproject.toml index 34ff7683..ba459bba 100644 --- a/lib/idp_common_pkg/pyproject.toml +++ b/lib/idp_common_pkg/pyproject.toml @@ -74,6 +74,7 @@ extraction = [ # Assessment module dependencies assessment = [ "Pillow==11.2.1", # For image handling + "PyMuPDF==1.25.5", # Required by OcrService for PDF processing "aws-lambda-powertools>=3.2.0", # Structured logging and observability "mypy-boto3-dynamodb>=1.40.56", "strands-agents==1.17.0", From 79fe3e763466b43007c6865a753b34b456f50f7c Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Sun, 23 Nov 2025 12:01:12 +0000 Subject: [PATCH 13/30] update config model usage --- .../idp_common/assessment/__init__.py | 43 ++++++------------- .../tests/unit/config/test_config_models.py | 19 +++----- .../config/test_config_models_integration.py | 14 +++--- .../src/assessment_function/index.py | 24 ++++------- .../src/assessment_function/index.py | 10 ++--- 5 files changed, 38 insertions(+), 72 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/__init__.py b/lib/idp_common_pkg/idp_common/assessment/__init__.py index 17281d46..39d2330f 100644 --- a/lib/idp_common_pkg/idp_common/assessment/__init__.py +++ b/lib/idp_common_pkg/idp_common/assessment/__init__.py @@ -7,9 +7,8 @@ This module provides services for assessing the confidence and accuracy of extraction results by analyzing them against source documents using LLMs. -The module supports both: -1. Original approach: Single inference for all attributes in a section -2. Granular approach: Multiple focused inferences with caching and parallelization +All assessment now uses the granular approach with Strands agents for +multiple focused inferences with caching and parallelization. """ import logging @@ -19,33 +18,32 @@ from .granular_service import GranularAssessmentService from .models import AssessmentResult, AttributeAssessment -from .service import AssessmentService as OriginalAssessmentService logger = logging.getLogger(__name__) class AssessmentService: """ - Backward-compatible AssessmentService that automatically selects the appropriate implementation. + Assessment service for evaluating document extraction confidence. - This class maintains the same interface as the original AssessmentService but automatically - chooses between the original and granular implementations based on configuration. + This class uses the granular Strands-based assessment approach for all assessments. 
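# Illustrative sketch of the unified interface described above: build the service from
# an IDPConfig and assess one section. The Document instance and section id are supplied
# by the caller (as the Lambda handlers later in this series do); values are assumptions.
from idp_common.assessment import AssessmentService
from idp_common.config.models import IDPConfig
from idp_common.models import Document


def assess_section(document: Document, section_id: str) -> Document:
    config = IDPConfig.model_validate(
        {"assessment": {"enabled": True, "max_workers": 20}}
    )
    service = AssessmentService(config=config)  # delegates to GranularAssessmentService
    return service.process_document_section(document, section_id)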
+ It provides backward compatibility by maintaining the same interface. """ def __init__(self, region: str | None = None, config: IDPConfig | None = None): """ - Initialize the assessment service with automatic implementation selection. + Initialize the assessment service. Args: region: AWS region for Bedrock - config: Configuration dictionary + config: Configuration dictionary or IDPConfig model """ if config is None: config = IDPConfig() elif isinstance(config, dict): config = IDPConfig(**config) - self._service = create_assessment_service(region=region, config=config) + self._service = GranularAssessmentService(region=region, config=config) def process_document_section(self, document, section_id: str): """Process a single section from a Document object to assess extraction confidence.""" @@ -60,40 +58,25 @@ def create_assessment_service( region: Optional[str] = None, config: Optional[IDPConfig] = None ): """ - Factory function to create the appropriate assessment service based on configuration. + Factory function to create the assessment service. Args: region: AWS region for Bedrock - config: Configuration dictionary + config: Configuration dictionary or IDPConfig model Returns: - OriginalAssessmentService or GranularAssessmentService based on configuration + GranularAssessmentService instance """ if not config: config = IDPConfig() - logger.info("No config provided, using original AssessmentService") - return OriginalAssessmentService(region=region, config=config) - # Check if granular assessment is enabled (default: False for backward compatibility) - - # Normalize the enabled value to handle both boolean and string values - - logger.info( - f"Granular assessment enabled check: raw_value={config.assessment.granular.enabled} (type: {type(config.assessment.granular.enabled)})" - ) - - if config.assessment.granular.enabled: - logger.info("Granular assessment enabled, using GranularAssessmentService") - return GranularAssessmentService(region=region, config=config) - else: - logger.info("Using original AssessmentService") - return OriginalAssessmentService(region=region, config=config) + logger.info("Creating GranularAssessmentService (Strands-based assessment)") + return GranularAssessmentService(region=region, config=config) __all__ = [ "AssessmentService", "GranularAssessmentService", - "OriginalAssessmentService", "AssessmentResult", "AttributeAssessment", "create_assessment_service", diff --git a/lib/idp_common_pkg/tests/unit/config/test_config_models.py b/lib/idp_common_pkg/tests/unit/config/test_config_models.py index 63b91ad0..9cd18788 100644 --- a/lib/idp_common_pkg/tests/unit/config/test_config_models.py +++ b/lib/idp_common_pkg/tests/unit/config/test_config_models.py @@ -142,23 +142,18 @@ def process_config(config: ExtractionConfig) -> bool: result = process_config(config) assert result is True - def test_assessment_granular_config(self): - """Test granular assessment configuration""" + def test_assessment_config(self): + """Test assessment configuration with new flat structure""" config_dict = { "model": "us.amazon.nova-lite-v1:0", - "granular": { - "enabled": True, - "list_batch_size": "5", - "simple_batch_size": "10", - "max_workers": "20", - }, + "enabled": True, + "max_workers": "20", } config = AssessmentConfig.model_validate(config_dict) - assert config.granular.enabled is True - assert config.granular.list_batch_size == 5 - assert config.granular.simple_batch_size == 10 - assert config.granular.max_workers == 20 + assert config.enabled is True + assert 
config.max_workers == 20 + assert isinstance(config.max_workers, int) def test_config_validation_range_checks(self): """Test that validation enforces ranges""" diff --git a/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py b/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py index bb433598..af18086e 100644 --- a/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py +++ b/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py @@ -217,12 +217,10 @@ def test_criteria_validation_config(self, config_root): assert config.assessment is not None assert isinstance(config.assessment.enabled, bool) - # Validate granular assessment settings - if hasattr(config.assessment, "granular"): - assert isinstance(config.assessment.granular.enabled, bool) - if config.assessment.granular.enabled: - assert config.assessment.granular.list_batch_size > 0 - assert config.assessment.granular.simple_batch_size > 0 + # Validate assessment settings + assert isinstance(config.assessment.enabled, bool) + if config.assessment.enabled: + assert config.assessment.max_workers > 0 def test_config_with_all_optional_fields(self, config_root): """Test that configs work even if optional fields are missing""" @@ -284,8 +282,8 @@ def test_config_type_coercion(self): assert config.extraction.top_p == 0.2 assert isinstance(config.extraction.top_p, float) - assert config.assessment.granular.list_batch_size == 5 - assert isinstance(config.assessment.granular.list_batch_size, int) + assert config.assessment.max_workers == 20 + assert isinstance(config.assessment.max_workers, int) def test_boolean_variations(self): """Test various boolean representations""" diff --git a/patterns/pattern-2/src/assessment_function/index.py b/patterns/pattern-2/src/assessment_function/index.py index e1ea12bd..18d3b281 100644 --- a/patterns/pattern-2/src/assessment_function/index.py +++ b/patterns/pattern-2/src/assessment_function/index.py @@ -151,13 +151,9 @@ def handler(event, context): if not section: raise ValueError(f"Section {section_id} not found in document") - # Check if granular assessment is enabled (moved earlier for Lambda metering context) - assessment_context = ( - "GranularAssessment" if config.assessment.granular.enabled else "Assessment" - ) - logger.info( - f"Assessment mode: {'Granular' if config.assessment.granular.enabled else 'Regular'} (context: {assessment_context})" - ) + # Assessment context for Lambda metering + assessment_context = "Assessment" + logger.info(f"Assessment mode: Strands-based (context: {assessment_context})") # Intelligent Assessment Skip: Check if extraction results already contain explainability_info if section.extraction_result_uri and section.extraction_result_uri.strip(): @@ -254,15 +250,13 @@ def handler(event, context): # Initialize assessment service with cache table for enhanced retry handling cache_table = os.environ.get("TRACKING_TABLE") - # Check if granular assessment is enabled - - # Use granular assessment service (always enabled) + # Use Strands-based granular assessment service (always enabled) from idp_common.assessment.granular_service import GranularAssessmentService assessment_service = GranularAssessmentService( config=config, cache_table=cache_table ) - logger.info("Using granular assessment service") + logger.info("Using Strands-based assessment service") # Process the document section for assessment t0 = time.time() @@ -333,11 +327,11 @@ def handler(event, context): # Assessment validation validation_enabled = ( - 
config.assessment.granular.enabled and config.assessment.validation_enabled + config.assessment.enabled and config.assessment.validation_enabled ) - logger.info(f"Assessment Enabled:{config.assessment.granular.enabled}") - logger.info(f"Validation Enabled:{validation_enabled}") - if not config.assessment.granular.enabled: + logger.info(f"Assessment Enabled: {config.assessment.enabled}") + logger.info(f"Validation Enabled: {validation_enabled}") + if not config.assessment.enabled: logger.info("Assessment is disabled.") elif not validation_enabled: logger.info("Assessment validation is disabled.") diff --git a/patterns/pattern-3/src/assessment_function/index.py b/patterns/pattern-3/src/assessment_function/index.py index ad29b551..9411d028 100644 --- a/patterns/pattern-3/src/assessment_function/index.py +++ b/patterns/pattern-3/src/assessment_function/index.py @@ -63,13 +63,9 @@ def handler(event, context): if not section: raise ValueError(f"Section {section_id} not found in document") - # Check if granular assessment is enabled (for Lambda metering context) - granular_config = config.assessment.granular - granular_enabled = granular_config.enabled - assessment_context = "GranularAssessment" if granular_enabled else "Assessment" - logger.info( - f"Assessment mode: {'Granular' if granular_enabled else 'Regular'} (context: {assessment_context})" - ) + # Assessment context for Lambda metering + assessment_context = "Assessment" + logger.info(f"Assessment mode: Strands-based (context: {assessment_context})") # Intelligent Assessment Skip: Check if extraction results already contain explainability_info if section.extraction_result_uri and section.extraction_result_uri.strip(): From 2e93ad3fbd5f8cc63445b5f364c1a2fc52e9c86c Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 10:44:23 +0000 Subject: [PATCH 14/30] strands argument passing update --- lib/idp_common_pkg/idp_common/assessment/strands_tools.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index 44379aa5..8c319096 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -54,7 +54,7 @@ def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: Success confirmation message or validation error details """ # Validate assessment structure and return helpful errors - validated_assessment = AssessmentOutput(**assessment) # pyright: ignore[reportCallIssue] + validated_assessment = AssessmentOutput.model_validate(assessment) # Store in agent state agent.state.set("assessment_output", validated_assessment.model_dump()) @@ -80,7 +80,7 @@ def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) """ @tool - def view_image(input_data: dict[str, Any], agent: Agent) -> dict: + def view_image(input_data: ViewImageInput, agent: Agent) -> dict: """ View a specific page image, optionally highlighting a bounding box area. 
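# Illustrative wiring for the tool factory above: build the per-document view_image tool
# and register it on a Strands Agent alongside submit_assessment. Agent construction
# details (model, prompts) are assumptions; the assessment service wires this internally.
from strands import Agent

from idp_common.assessment.strands_tools import create_view_image_tool, submit_assessment

page_images = [b"<jpeg bytes for page 1>", b"<jpeg bytes for page 2>"]  # loaded elsewhere
view_image = create_view_image_tool(page_images, sorted_page_ids=["1", "2"])
agent = Agent(tools=[view_image, submit_assessment])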
@@ -104,7 +104,7 @@ def view_image(input_data: dict[str, Any], agent: Agent) -> dict: }, agent) """ # Validate input - let ValidationError propagate - view_input = ViewImageInput(**input_data) + view_input = ViewImageInput.model_validate(input_data) # Validate image index exists if view_input.image_index >= len(page_images): From 338c87fbe86f0160303ac9a9b488a622e0ac9a62 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 10:46:54 +0000 Subject: [PATCH 15/30] update the tests for the config --- .../idp_common/assessment/example_usage.py | 109 ++- .../test_assessment_backward_compatibility.py | 650 ------------------ .../config/test_config_models_integration.py | 8 +- lib/idp_common_pkg/uv.lock | 2 + 4 files changed, 45 insertions(+), 724 deletions(-) delete mode 100644 lib/idp_common_pkg/tests/test_assessment_backward_compatibility.py diff --git a/lib/idp_common_pkg/idp_common/assessment/example_usage.py b/lib/idp_common_pkg/idp_common/assessment/example_usage.py index 90ecde7f..4cbe36f0 100644 --- a/lib/idp_common_pkg/idp_common/assessment/example_usage.py +++ b/lib/idp_common_pkg/idp_common/assessment/example_usage.py @@ -3,10 +3,11 @@ # SPDX-License-Identifier: MIT-0 """ -Example usage of the Granular Assessment Service. +Example usage of the Strands-Based Assessment Service. -This script demonstrates how to use the granular assessment approach +This script demonstrates how to use the Strands-based assessment approach for improved accuracy and scalability when assessing document extraction confidence. +All assessment now uses this unified approach with tool-based agent interactions. """ import json @@ -30,13 +31,14 @@ def load_config(config_path: str) -> Dict[str, Any]: def example_granular_assessment(): - """Example of using granular assessment service.""" + """Example of using Strands-based assessment service.""" - # Load configuration with granular assessment enabled + # Load configuration for assessment (always uses Strands-based approach) config = { "assessment": { + "enabled": True, "default_confidence_threshold": 0.9, - "model": "us.anthropic.claude-3-7-sonnet-20250219-v1:0", + "model": "us.anthropic.claude-sonnet-4-20250514-v1:0", "system_prompt": "You are a document analysis assessment expert...", "task_prompt": """ @@ -53,13 +55,8 @@ def example_granular_assessment(): Analyze and provide confidence assessments... 
""", - # Granular assessment configuration - "granular": { - "enabled": True, - "max_workers": 20, - "simple_batch_size": 3, - "list_batch_size": 1, - }, + # Strands-based assessment settings + "max_workers": 20, }, "classes": [ { @@ -158,86 +155,61 @@ def example_granular_assessment(): def compare_approaches(): - """Compare original vs granular assessment approaches.""" - - logger.info("=== Comparison: Original vs Granular Assessment ===") + """Demonstrate the Strands-based assessment approach.""" - # Configuration for original approach - original_config = {"assessment": {"granular": {"enabled": False}}} + logger.info("=== Strands-Based Assessment Approach ===") - # Configuration for granular approach - granular_config = { + # Configuration for Strands-based assessment (always used) + assessment_config = { "assessment": { - "granular": { - "enabled": True, - "max_workers": 4, - "simple_batch_size": 3, - "list_batch_size": 1, - } + "enabled": True, + "max_workers": 4, } } from idp_common.assessment import create_assessment_service - # Create both services - original_service = create_assessment_service(config=original_config) - granular_service = create_assessment_service(config=granular_config) - - logger.info(f"Original service: {type(original_service).__name__}") - logger.info(f"Granular service: {type(granular_service).__name__}") + # Create assessment service + assessment_service = create_assessment_service(config=assessment_config) - # Show the differences - logger.info("\nKey Differences:") - logger.info("Original Approach:") - logger.info(" - Single inference for all attributes") - logger.info(" - Simple implementation") - logger.info(" - May struggle with complex documents") + logger.info(f"Assessment service: {type(assessment_service).__name__}") - logger.info("\nGranular Approach:") - logger.info(" - Multiple focused inferences") + # Show the features + logger.info("\nStrands-Based Assessment Features:") + logger.info(" - Multiple focused inferences per field") + logger.info(" - Tool-based interaction with Strands agents") logger.info(" - Prompt caching for cost optimization") logger.info(" - Parallel processing for speed") logger.info(" - Better handling of complex documents") + logger.info(" - Consistent assessment structure") def demonstrate_configuration_options(): - """Demonstrate different configuration options for granular assessment.""" + """Demonstrate different configuration options for Strands-based assessment.""" logger.info("=== Configuration Options ===") # Conservative configuration (good for starting) conservative_config = { "assessment": { - "granular": { - "enabled": True, - "max_workers": 2, - "simple_batch_size": 2, - "list_batch_size": 1, - } + "enabled": True, + "max_workers": 2, } } # Aggressive configuration (for high-throughput) aggressive_config = { "assessment": { - "granular": { - "enabled": True, - "max_workers": 8, - "simple_batch_size": 5, - "list_batch_size": 2, - } + "enabled": True, + "max_workers": 50, } } # Balanced configuration (recommended) balanced_config = { "assessment": { - "granular": { - "enabled": True, - "max_workers": 4, - "simple_batch_size": 3, - "list_batch_size": 1, - } + "enabled": True, + "max_workers": 20, } } @@ -249,23 +221,23 @@ def demonstrate_configuration_options(): for name, config in configs.items(): logger.info(f"\n{name} Configuration:") - granular_settings = config["assessment"]["granular"] - for key, value in granular_settings.items(): + assessment_settings = config["assessment"] + for key, value in 
assessment_settings.items(): logger.info(f" {key}: {value}") def main(): """Main example function.""" - logger.info("=== Granular Assessment Service Examples ===") + logger.info("=== Strands-Based Assessment Service Examples ===") try: # Example 1: Basic usage logger.info("\n1. Basic Usage Example") service, config = example_granular_assessment() - # Example 2: Compare approaches - logger.info("\n2. Approach Comparison") + # Example 2: Demonstrate the approach + logger.info("\n2. Assessment Approach") compare_approaches() # Example 3: Configuration options @@ -273,11 +245,12 @@ def main(): demonstrate_configuration_options() logger.info("\n=== Examples Complete ===") - logger.info("To use granular assessment in your application:") - logger.info("1. Add granular configuration to your config file") - logger.info("2. Use create_assessment_service() factory function") - logger.info("3. Process documents with the same interface") - logger.info("4. Monitor performance and tune parameters") + logger.info("To use Strands-based assessment in your application:") + logger.info("1. Set assessment.enabled to true in your config") + logger.info("2. Configure max_workers based on your throughput needs") + logger.info("3. Use create_assessment_service() factory function") + logger.info("4. Process documents with the same interface") + logger.info("5. Monitor performance and tune max_workers parameter") except ImportError as e: logger.error(f"Import error: {e}") diff --git a/lib/idp_common_pkg/tests/test_assessment_backward_compatibility.py b/lib/idp_common_pkg/tests/test_assessment_backward_compatibility.py deleted file mode 100644 index a8e17c88..00000000 --- a/lib/idp_common_pkg/tests/test_assessment_backward_compatibility.py +++ /dev/null @@ -1,650 +0,0 @@ -# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -# SPDX-License-Identifier: MIT-0 - -""" -Test backward compatibility of assessment service with granular assessment enabled/disabled. -This test emulates the Pattern-2 AssessmentFunction to ensure both standard and granular -assessment services work correctly. 
-""" - -import json -import os -import unittest -from typing import Any, Dict -from unittest.mock import patch - -from idp_common import assessment -from idp_common.config.models import IDPConfig -from idp_common.models import Document, Page, Section, Status - - -class TestAssessmentBackwardCompatibility(unittest.TestCase): - """Test assessment service backward compatibility.""" - - def setUp(self): - """Set up test fixtures.""" - # Create a mock document with extraction results - self.document = self._create_test_document() - self.section_id = "section_1" - - # Base configuration for assessment (JSON Schema format) - self.base_config = { - "classes": [ - { - "$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "invoice", - "x-aws-idp-document-type": "invoice", - "type": "object", - "description": "A billing document", - "properties": { - "invoice_number": { - "type": "string", - "description": "The unique identifier for the invoice", - "x-aws-idp-confidence-threshold": 0.85, - }, - "total_amount": { - "type": "string", - "description": "The final amount to be paid", - "x-aws-idp-confidence-threshold": 0.9, - }, - "vendor_info": { - "type": "object", - "description": "Vendor information", - "properties": { - "vendor_name": { - "type": "string", - "description": "Name of the vendor", - "x-aws-idp-confidence-threshold": 0.8, - }, - "vendor_address": { - "type": "string", - "description": "Address of the vendor", - "x-aws-idp-confidence-threshold": 0.75, - }, - }, - }, - "line_items": { - "type": "array", - "description": "List of invoice line items", - "x-aws-idp-list-item-description": "Individual line item", - "items": { - "type": "object", - "properties": { - "item_description": { - "type": "string", - "description": "Description of the item", - "x-aws-idp-confidence-threshold": 0.7, - }, - "item_amount": { - "type": "string", - "description": "Amount for this item", - "x-aws-idp-confidence-threshold": 0.8, - }, - }, - }, - }, - }, - } - ], - "assessment": { - "model": "us.anthropic.claude-3-haiku-20240307-v1:0", - "system_prompt": "You are an assessment expert.", - "task_prompt": "Assess the confidence of extraction results for this {DOCUMENT_CLASS} document.\n\n\n{ATTRIBUTE_NAMES_AND_DESCRIPTIONS}\n\n\n<>\n\n\n{DOCUMENT_IMAGE}\n\n\n\n{OCR_TEXT_CONFIDENCE}\n\n\n<>\n\n\n{EXTRACTION_RESULTS}\n", - "temperature": "0.0", - "top_p": "0.1", - "top_k": "5", - "max_tokens": "4096", - "default_confidence_threshold": "0.9", - }, - } - - def _create_test_document(self) -> Document: - """Create a test document with extraction results.""" - # Create test pages - pages = {} - for i in range(1, 3): # 2 pages - page = Page( - page_id=str(i), - image_uri=f"s3://test-bucket/images/page_{i}.jpg", - parsed_text_uri=f"s3://test-bucket/text/page_{i}.txt", - raw_text_uri=f"s3://test-bucket/raw/page_{i}.json", - ) - pages[str(i)] = page - - # Create test section with extraction results - section = Section( - section_id="section_1", - classification="invoice", - page_ids=["1", "2"], - extraction_result_uri="s3://test-bucket/extraction/section_1.json", - ) - - # Create document - document = Document( - id="test_doc_123", - input_key="test_document.pdf", - status=Status.EXTRACTING, - pages=pages, - sections=[section], - ) - - return document - - def _create_test_extraction_results(self) -> Dict[str, Any]: - """Create test extraction results.""" - return { - "inference_result": { - "invoice_number": "INV-2024-001", - "total_amount": "$1,234.56", - "vendor_info": { - "vendor_name": "ACME Corp", - 
"vendor_address": "123 Main St, City, State", - }, - "line_items": [ - {"item_description": "Widget A", "item_amount": "$500.00"}, - {"item_description": "Widget B", "item_amount": "$734.56"}, - ], - }, - "metadata": {"extraction_time": 5.2}, - } - - def _create_mock_image_data(self) -> bytes: - """Create a valid mock image data for testing.""" - import io - - from PIL import Image - - # Create a simple 100x100 white image - img = Image.new("RGB", (100, 100), color="white") - img_bytes = io.BytesIO() - img.save(img_bytes, format="JPEG") - return img_bytes.getvalue() - - def _create_mock_assessment_response(self, task_type: str = "standard") -> str: - """Create mock assessment response based on task type.""" - if task_type == "simple_batch": - return json.dumps( - { - "invoice_number": { - "confidence": 0.95, - "confidence_reason": "Clear text, high OCR confidence", - }, - "total_amount": { - "confidence": 0.88, - "confidence_reason": "Clearly visible amount", - }, - } - ) - elif task_type == "group": - return json.dumps( - { - "vendor_info": { - "vendor_name": { - "confidence": 0.92, - "confidence_reason": "Company name clearly visible", - }, - "vendor_address": { - "confidence": 0.78, - "confidence_reason": "Address partially obscured", - }, - } - } - ) - elif task_type == "list_item": - return json.dumps( - { - "item_description": { - "confidence": 0.85, - "confidence_reason": "Item description clear", - }, - "item_amount": { - "confidence": 0.90, - "confidence_reason": "Amount clearly visible", - }, - } - ) - else: # standard assessment - return json.dumps( - { - "invoice_number": { - "confidence": 0.95, - "confidence_reason": "Clear text, high OCR confidence", - }, - "total_amount": { - "confidence": 0.88, - "confidence_reason": "Clearly visible amount", - }, - "vendor_info": { - "vendor_name": { - "confidence": 0.92, - "confidence_reason": "Company name clearly visible", - }, - "vendor_address": { - "confidence": 0.78, - "confidence_reason": "Address partially obscured", - }, - }, - "line_items": [ - { - "item_description": { - "confidence": 0.85, - "confidence_reason": "Item description clear", - }, - "item_amount": { - "confidence": 0.90, - "confidence_reason": "Amount clearly visible", - }, - }, - { - "item_description": { - "confidence": 0.82, - "confidence_reason": "Item description mostly clear", - }, - "item_amount": { - "confidence": 0.87, - "confidence_reason": "Amount visible", - }, - }, - ], - } - ) - - @patch("idp_common.s3.get_json_content") - @patch("idp_common.s3.get_text_content") - @patch("idp_common.s3.write_content") - @patch("idp_common.image.prepare_image") - @patch("idp_common.image.prepare_bedrock_image_attachment") - @patch("idp_common.bedrock.invoke_model") - def test_standard_assessment_service( - self, - mock_invoke_model, - mock_prepare_bedrock_image, - mock_prepare_image, - mock_write_content, - mock_get_text_content, - mock_get_json_content, - ): - """Test standard assessment service (granular disabled).""" - # Configure mocks - mock_get_json_content.return_value = self._create_test_extraction_results() - mock_get_text_content.return_value = "Sample document text content" - mock_prepare_image.return_value = self._create_mock_image_data() - mock_prepare_bedrock_image.return_value = { - "type": "image", - "source": { - "type": "base64", - "media_type": "image/jpeg", - "data": "mock_data", - }, - } - - # Mock Bedrock response for standard assessment - mock_response = { - "output": { - "message": { - "content": [ - {"text": 
self._create_mock_assessment_response("standard")} - ] - } - }, - "metering": {"inputTokens": 1000, "outputTokens": 200, "totalTokens": 1200}, - } - mock_invoke_model.return_value = mock_response - - # Create config with granular assessment disabled - config = self.base_config.copy() - config["assessment"]["granular"] = { - "enabled": False, - "max_workers": 4, - "simple_batch_size": 3, - "list_batch_size": 1, - } - - # Initialize assessment service with IDPConfig model - idp_config = IDPConfig.model_validate(config) - assessment_service = assessment.AssessmentService(config=idp_config) - - # Process document section - result_document = assessment_service.process_document_section( - self.document, self.section_id - ) - - # Verify the service used standard assessment - self.assertIsNotNone(result_document) - self.assertEqual(result_document.id, self.document.id) - - # Verify Bedrock was called once (standard assessment) - self.assertEqual(mock_invoke_model.call_count, 1) - - # Verify the call was made with expected parameters - call_args = mock_invoke_model.call_args - self.assertIn("system_prompt", call_args[1]) - self.assertIn("content", call_args[1]) - - # Verify extraction results were written back - mock_write_content.assert_called_once() - written_data = mock_write_content.call_args[0][0] - self.assertIn("explainability_info", written_data) - self.assertIsInstance(written_data["explainability_info"], list) - - @patch("idp_common.s3.get_json_content") - @patch("idp_common.s3.get_text_content") - @patch("idp_common.s3.write_content") - @patch("idp_common.image.prepare_image") - @patch("idp_common.image.prepare_bedrock_image_attachment") - @patch("idp_common.bedrock.invoke_model") - def test_granular_assessment_service( - self, - mock_invoke_model, - mock_prepare_bedrock_image, - mock_prepare_image, - mock_write_content, - mock_get_text_content, - mock_get_json_content, - ): - """Test granular assessment service (granular enabled).""" - # Configure mocks - mock_get_json_content.return_value = self._create_test_extraction_results() - mock_get_text_content.return_value = "Sample document text content" - mock_prepare_image.return_value = self._create_mock_image_data() - mock_prepare_bedrock_image.return_value = { - "type": "image", - "source": { - "type": "base64", - "media_type": "image/jpeg", - "data": "mock_data", - }, - } - - # Mock Bedrock responses for granular assessment (multiple calls) - def mock_bedrock_side_effect(*args, **kwargs): - # Determine response based on content - content = kwargs.get("content", []) - content_text = "" - for item in content: - if isinstance(item, dict) and "text" in item: - content_text += item["text"] - - # Return appropriate response based on content - if "invoice_number" in content_text and "total_amount" in content_text: - response_text = self._create_mock_assessment_response("simple_batch") - elif "vendor_info" in content_text: - response_text = self._create_mock_assessment_response("group") - elif "item_description" in content_text: - response_text = self._create_mock_assessment_response("list_item") - else: - response_text = self._create_mock_assessment_response("standard") - - return { - "content": [{"text": response_text}], - "metering": { - "inputTokens": 500, - "outputTokens": 100, - "totalTokens": 600, - }, - } - - mock_invoke_model.side_effect = mock_bedrock_side_effect - - # Create config with granular assessment enabled - config = self.base_config.copy() - config["assessment"]["granular"] = { - "enabled": True, - "max_workers": 4, - 
"simple_batch_size": 2, # Will batch invoice_number and total_amount - "list_batch_size": 1, # Will process each list item separately - } - - # Initialize assessment service with IDPConfig model - idp_config = IDPConfig.model_validate(config) - assessment_service = assessment.AssessmentService(config=idp_config) - - # Process document section - result_document = assessment_service.process_document_section( - self.document, self.section_id - ) - - # Verify the service used granular assessment - self.assertIsNotNone(result_document) - self.assertEqual(result_document.id, self.document.id) - - # Verify Bedrock was called multiple times (granular assessment) - # Expected: 1 simple batch + 1 group + 2 list items = 4 calls - self.assertGreater(mock_invoke_model.call_count, 1) - - # Verify extraction results were written back - mock_write_content.assert_called_once() - written_data = mock_write_content.call_args[0][0] - self.assertIn("explainability_info", written_data) - self.assertIsInstance(written_data["explainability_info"], list) - - # Verify granular assessment metadata - metadata = written_data.get("metadata", {}) - self.assertTrue(metadata.get("granular_assessment_used", False)) - self.assertIn("assessment_tasks_total", metadata) - self.assertIn("assessment_tasks_successful", metadata) - - @patch("idp_common.s3.get_json_content") - @patch("idp_common.s3.get_text_content") - @patch("idp_common.s3.write_content") - @patch("idp_common.image.prepare_image") - @patch("idp_common.bedrock.invoke_model") - def test_assessment_factory_selection( - self, - mock_invoke_model, - mock_prepare_image, - mock_write_content, - mock_get_text_content, - mock_get_json_content, - ): - """Test that the assessment factory correctly selects standard vs granular service.""" - # Configure mocks - mock_get_json_content.return_value = self._create_test_extraction_results() - mock_get_text_content.return_value = "Sample document text content" - mock_prepare_image.return_value = b"mock_image_data" - mock_invoke_model.return_value = { - "content": [{"text": self._create_mock_assessment_response("standard")}], - "metering": {"inputTokens": 1000, "outputTokens": 200, "totalTokens": 1200}, - } - - # Test 1: Granular disabled - should use standard service - config_standard = self.base_config.copy() - config_standard["assessment"]["granular"] = {"enabled": False} - - idp_config_standard = IDPConfig.model_validate(config_standard) - service_standard = assessment.AssessmentService(config=idp_config_standard) - # The main service is AssessmentService, but it should use the standard service internally - self.assertEqual(type(service_standard).__name__, "AssessmentService") - # Check that granular is disabled in config - granular_config = config_standard.get("assessment", {}).get("granular", {}) - self.assertFalse(granular_config.get("enabled", False)) - - # Test 2: Granular enabled - should use granular service - config_granular = self.base_config.copy() - config_granular["assessment"]["granular"] = {"enabled": True} - - idp_config_granular = IDPConfig.model_validate(config_granular) - service_granular = assessment.AssessmentService(config=idp_config_granular) - self.assertEqual(type(service_granular).__name__, "AssessmentService") - # Check that granular is enabled in config - granular_config = config_granular.get("assessment", {}).get("granular", {}) - self.assertTrue(granular_config.get("enabled", False)) - - # Test 3: No granular config - should default to standard - config_default = self.base_config.copy() - # Remove 
granular config entirely - if "granular" in config_default["assessment"]: - del config_default["assessment"]["granular"] - - idp_config_default = IDPConfig.model_validate(config_default) - service_default = assessment.AssessmentService(config=idp_config_default) - self.assertEqual(type(service_default).__name__, "AssessmentService") - # Check that granular is not configured (defaults to disabled) - granular_config = config_default.get("assessment", {}).get("granular", {}) - self.assertFalse(granular_config.get("enabled", False)) - - def test_confidence_threshold_handling(self): - """Test that confidence thresholds are handled correctly in both services.""" - # Test with various threshold formats (float, int, None) - test_cases = [ - (0.85, 0.85), - (0.9, 0.9), - (1, 1.0), - (None, 0.9), # Should use default - ] - - for threshold_input, expected_output in test_cases: - config = self.base_config.copy() - if threshold_input is not None: - config["classes"][0]["properties"]["invoice_number"][ - "x-aws-idp-confidence-threshold" - ] = threshold_input - else: - # Remove confidence_threshold to test None case - if ( - "x-aws-idp-confidence-threshold" - in config["classes"][0]["properties"]["invoice_number"] - ): - del config["classes"][0]["properties"]["invoice_number"][ - "x-aws-idp-confidence-threshold" - ] - - # Test with both standard and granular services - for granular_enabled in [False, True]: - config["assessment"]["granular"] = {"enabled": granular_enabled} - - idp_config = IDPConfig.model_validate(config) - service = assessment.AssessmentService(config=idp_config) - - # The service should initialize without errors - self.assertIsNotNone(service) - - @patch.dict(os.environ, {"WORKING_BUCKET": "test-bucket"}) - def test_assessment_function_emulation(self): - """Test that emulates the actual AssessmentFunction handler.""" - # This test emulates the patterns/pattern-2/src/assessment_function/index.py handler - - # Mock event data (similar to what Lambda receives) - event = { - "document": { - "id": "test_doc_123", - "input_key": "test_document.pdf", - "status": "EXTRACTING", - "pages": { - "1": { - "page_id": "1", - "image_uri": "s3://test-bucket/images/page_1.jpg", - "parsed_text_uri": "s3://test-bucket/text/page_1.txt", - "raw_text_uri": "s3://test-bucket/raw/page_1.json", - } - }, - "sections": [ - { - "section_id": "section_1", - "classification": "invoice", - "page_ids": ["1"], - "extraction_result_uri": "s3://test-bucket/extraction/section_1.json", - } - ], - }, - "section_id": "section_1", - } - - # Test with both granular enabled and disabled - for granular_enabled in [False, True]: - with self.subTest(granular_enabled=granular_enabled): - config = self.base_config.copy() - config["assessment"]["granular"] = {"enabled": granular_enabled} - - # Emulate the handler logic - with ( - patch("idp_common.get_config", return_value=config), - patch( - "idp_common.s3.get_json_content", - return_value=self._create_test_extraction_results(), - ), - patch("idp_common.s3.get_text_content", return_value="Sample text"), - patch("idp_common.s3.write_content"), - patch( - "idp_common.image.prepare_image", - return_value=self._create_mock_image_data(), - ), - patch( - "idp_common.image.prepare_bedrock_image_attachment", - return_value={ - "type": "image", - "source": { - "type": "base64", - "media_type": "image/jpeg", - "data": "mock_data", - }, - }, - ), - patch("idp_common.bedrock.invoke_model") as mock_invoke, - ): - # Configure mock response - mock_invoke.return_value = { - "output": { - "message": 
{ - "content": [ - { - "text": self._create_mock_assessment_response( - "standard" - ) - } - ] - } - }, - "metering": { - "inputTokens": 1000, - "outputTokens": 200, - "totalTokens": 1200, - }, - } - - # Extract inputs (emulating handler) - document_data = event.get("document", {}) - section_id = event.get("section_id") - - # Validate inputs - self.assertIsNotNone(document_data) - self.assertIsNotNone(section_id) - - # Convert to Document object (emulating Document.load_document) - document = Document.from_dict(document_data) - document.status = Status.ASSESSING - - # Initialize assessment service - idp_config = IDPConfig.model_validate(config) - assessment_service = assessment.AssessmentService(config=idp_config) - - # Process the document section - updated_document = assessment_service.process_document_section( - document, str(section_id) - ) - - # Verify processing succeeded - self.assertIsNotNone(updated_document) - self.assertEqual(updated_document.id, document.id) - self.assertNotEqual(updated_document.status, Status.FAILED) - - # Verify appropriate service was used - if granular_enabled: - self.assertEqual( - type(assessment_service._service).__name__, - "GranularAssessmentService", - ) - # Granular assessment may make multiple calls - self.assertGreaterEqual(mock_invoke.call_count, 1) - else: - # The original service is called "AssessmentService" in service.py - self.assertEqual( - type(assessment_service._service).__name__, - "AssessmentService", - ) - # Standard assessment makes exactly one call - self.assertEqual(mock_invoke.call_count, 1) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py b/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py index af18086e..ad1e854d 100644 --- a/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py +++ b/lib/idp_common_pkg/tests/unit/config/test_config_models_integration.py @@ -261,12 +261,8 @@ def test_config_type_coercion(self): }, "assessment": { "model": "test", - "granular": { - "enabled": True, - "list_batch_size": "5", # String - "simple_batch_size": 10, # Int - "max_workers": "20", # String - }, + "enabled": True, + "max_workers": "20", # String }, "classes": [], } diff --git a/lib/idp_common_pkg/uv.lock b/lib/idp_common_pkg/uv.lock index 26dfb208..db0a0b14 100644 --- a/lib/idp_common_pkg/uv.lock +++ b/lib/idp_common_pkg/uv.lock @@ -969,6 +969,7 @@ assessment = [ { name = "aws-lambda-powertools" }, { name = "mypy-boto3-dynamodb" }, { name = "pillow" }, + { name = "pymupdf" }, { name = "strands-agents" }, ] classification = [ @@ -1099,6 +1100,7 @@ requires-dist = [ { name = "pydantic", specifier = ">=2.12.0" }, { name = "pymupdf", marker = "extra == 'agentic-extraction'", specifier = "==1.25.5" }, { name = "pymupdf", marker = "extra == 'all'", specifier = "==1.25.5" }, + { name = "pymupdf", marker = "extra == 'assessment'", specifier = "==1.25.5" }, { name = "pymupdf", marker = "extra == 'ocr'", specifier = "==1.25.5" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=7.4.0" }, { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=1.1.0" }, From 16480da3323dd8b79f94790b4ff3539f27ac3c01 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 11:26:27 +0000 Subject: [PATCH 16/30] bug fixes --- lib/idp_common_pkg/idp_common/assessment/granular_service.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 482885d6..730ac76b 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -22,6 +22,7 @@ from idp_common import image, metrics, s3, utils from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_executor import execute_assessment_tasks_parallel +from idp_common.assessment.strands_service import _convert_field_path_to_string from idp_common.config.models import IDPConfig from idp_common.config.schema_constants import ( SCHEMA_ITEMS, @@ -305,7 +306,7 @@ def _traverse( task_id=f"task_{task_counter[0]}", task_type="attribute", field_path=item_path, - field_name=prop_name, + field_name=_convert_field_path_to_string(item_path), field_schema=items_schema, confidence_threshold=threshold, parent_assessment_dict=assessment_list, # type: ignore @@ -328,7 +329,7 @@ def _traverse( task_id=f"task_{task_counter[0]}", task_type="attribute", field_path=field_path, - field_name=prop_name, + field_name=_convert_field_path_to_string(field_path), field_schema=prop_schema, confidence_threshold=threshold, parent_assessment_dict=parent_dict, From 984ac36cd80d341e5484c56975091a412c1b931e Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 13:30:08 +0000 Subject: [PATCH 17/30] make sure model dump uses json mode --- lib/idp_common_pkg/idp_common/assessment/strands_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index 8c319096..ca5fcdeb 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -57,11 +57,11 @@ def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: validated_assessment = AssessmentOutput.model_validate(assessment) # Store in agent state - agent.state.set("assessment_output", validated_assessment.model_dump()) + agent.state.set("assessment_output", validated_assessment.model_dump(mode="json")) logger.info( "Assessment submitted successfully", - extra={"assessment": validated_assessment.model_dump()}, + extra={"assessment": validated_assessment.model_dump(mode="json")}, ) return "Assessment submitted successfully. You can now finish the task." From af89f4721946a092d48e0c7fa430c3c5ef78a212 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 15:13:39 +0000 Subject: [PATCH 18/30] bbox update --- .../idp_common/assessment/geometry_utils.py | 116 ++++++++++++++ .../idp_common/assessment/granular_service.py | 143 +---------------- .../idp_common/assessment/models.py | 151 +++++++++++++++++- .../idp_common/assessment/service.py | 148 +---------------- .../idp_common/assessment/strands_models.py | 33 ++-- .../idp_common/assessment/strands_service.py | 44 +++-- 6 files changed, 307 insertions(+), 328 deletions(-) create mode 100644 lib/idp_common_pkg/idp_common/assessment/geometry_utils.py diff --git a/lib/idp_common_pkg/idp_common/assessment/geometry_utils.py b/lib/idp_common_pkg/idp_common/assessment/geometry_utils.py new file mode 100644 index 00000000..36cd09fa --- /dev/null +++ b/lib/idp_common_pkg/idp_common/assessment/geometry_utils.py @@ -0,0 +1,116 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: MIT-0 + +""" +Shared utilities for geometry data conversion. + +Consolidates duplicate geometry conversion logic from service.py and granular_service.py. +""" + +import os +from typing import Any + +from aws_lambda_powertools import Logger + +from idp_common.assessment.models import Geometry + +logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) + + +def process_assessment_geometry( + attr_assessment: dict[str, Any], attr_name: str = "" +) -> dict[str, Any]: + """ + Process and standardize geometry data in assessment response. + + Args: + attr_assessment: Assessment dict with potential bbox/page fields + attr_name: Field name for logging + + Returns: + Enhanced assessment with standardized geometry + """ + enhanced = attr_assessment.copy() + + # Check for bbox data + if "bbox" in attr_assessment and "page" in attr_assessment: + try: + bbox_coords = attr_assessment["bbox"] + page_num = attr_assessment["page"] + + if isinstance(bbox_coords, list) and len(bbox_coords) == 4: + # Create Geometry object and convert to UI format + geometry = Geometry.from_bbox_list(bbox_coords, page_num) + enhanced["geometry"] = [geometry.to_ui_format()] + + logger.debug( + f"Converted bbox for {attr_name}: {bbox_coords} -> geometry" + ) + else: + logger.warning(f"Invalid bbox format for {attr_name}: {bbox_coords}") + except Exception as e: + logger.warning(f"Failed to process bbox for {attr_name}: {e}") + elif "bbox" in attr_assessment and "page" not in attr_assessment: + logger.warning( + f"Found bbox without page for {attr_name} - removing incomplete bbox data" + ) + elif "page" in attr_assessment and "bbox" not in attr_assessment: + logger.warning( + f"Found page without bbox for {attr_name} - removing incomplete page data" + ) + + # Remove raw bbox/page data + enhanced.pop("bbox", None) + enhanced.pop("page", None) + + return enhanced + + +def extract_geometry_from_nested_dict( + data: dict[str, Any], path: list[str] | None = None +) -> dict[str, Any]: + """ + Recursively process geometry data in nested assessment structures. 
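# Illustrative input/output for the process_assessment_geometry helper defined above
# (field values are made up): a raw bbox/page pair from the model is converted into a
# "geometry" list in UI format, and the raw keys are stripped.
from idp_common.assessment.geometry_utils import process_assessment_geometry

raw = {
    "confidence": 0.92,
    "confidence_reason": "clear text",
    "bbox": [120, 80, 480, 120],  # x1, y1, x2, y2 on the 0-1000 scale
    "page": 1,
}
enhanced = process_assessment_geometry(raw, attr_name="invoice_number")
assert "bbox" not in enhanced and "page" not in enhanced
assert enhanced["confidence"] == 0.92 and "geometry" in enhanced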
+ + Args: + data: Assessment data dictionary (may contain nested dicts/lists) + path: Current path for logging + + Returns: + Enhanced data with processed geometry + """ + if path is None: + path = [] + + if not isinstance(data, dict): + return data + + result = {} + + for key, value in data.items(): + current_path = path + [key] + + if isinstance(value, dict): + # Check if this looks like an assessment entry + if "confidence" in value or "bbox" in value: + # Process this assessment + result[key] = process_assessment_geometry(value, ".".join(current_path)) + else: + # Recurse into nested dict + result[key] = extract_geometry_from_nested_dict(value, current_path) + + elif isinstance(value, list): + # Process each item in list + processed_list = [] + for i, item in enumerate(value): + if isinstance(item, dict): + processed_list.append( + extract_geometry_from_nested_dict(item, current_path + [str(i)]) + ) + else: + processed_list.append(item) + result[key] = processed_list + else: + result[key] = value + + return result diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 730ac76b..9a5fd70c 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -646,148 +646,7 @@ def _get_text_confidence_data(self, page) -> str: ) return "Text Confidence Data Unavailable" - def _convert_bbox_to_geometry( - self, bbox_coords: list[float], page_num: int - ) -> dict[str, Any]: - """ - Convert [x1,y1,x2,y2] coordinates to geometry format. - - Args: - bbox_coords: list of 4 coordinates [x1, y1, x2, y2] in 0-1000 scale - page_num: Page number where the bounding box appears - - Returns: - dictionary in geometry format compatible with pattern-1 UI - """ - if len(bbox_coords) != 4: - raise ValueError(f"Expected 4 coordinates, got {len(bbox_coords)}") - - x1, y1, x2, y2 = bbox_coords - - # Ensure coordinates are in correct order - x1, x2 = min(x1, x2), max(x1, x2) - y1, y2 = min(y1, y2), max(y1, y2) - - # Convert from normalized 0-1000 scale to 0-1 - left = x1 / 1000.0 - top = y1 / 1000.0 - width = (x2 - x1) / 1000.0 - height = (y2 - y1) / 1000.0 - - return { - "boundingBox": {"top": top, "left": left, "width": width, "height": height}, - "page": page_num, - } - - def _process_single_assessment_geometry( - self, attr_assessment: dict[str, Any], attr_name: str = "" - ) -> dict[str, Any]: - """ - Process geometry data for a single assessment (with confidence key). 
- - Args: - attr_assessment: Single assessment dictionary with confidence data - attr_name: Name of attribute for logging - - Returns: - Enhanced assessment with geometry converted to proper format - """ - enhanced_attr = attr_assessment.copy() - - # Check if this assessment includes bbox data - if "bbox" in attr_assessment or "page" in attr_assessment: - # Both bbox and page are required for valid geometry - if "bbox" in attr_assessment and "page" in attr_assessment: - try: - bbox_coords = attr_assessment["bbox"] - page_num = attr_assessment["page"] - - # Validate bbox coordinates - if isinstance(bbox_coords, list) and len(bbox_coords) == 4: - # Convert to geometry format - geometry = self._convert_bbox_to_geometry(bbox_coords, page_num) - enhanced_attr["geometry"] = [geometry] - - logger.debug( - f"Converted bounding box for {attr_name}: {bbox_coords} -> geometry format" - ) - else: - logger.warning( - f"Invalid bounding box format for {attr_name}: {bbox_coords}" - ) - except Exception as e: - logger.warning( - f"Failed to process bounding box for {attr_name}: {str(e)}" - ) - raise - else: - # If only one of bbox/page exists, log a warning about incomplete data - if "bbox" in attr_assessment and "page" not in attr_assessment: - logger.warning( - f"Found bbox without page for {attr_name} - removing incomplete bbox data" - ) - elif "page" in attr_assessment and "bbox" not in attr_assessment: - logger.warning( - f"Found page without bbox for {attr_name} - removing incomplete page data" - ) - - # Always remove raw bbox/page data from output (whether processed or incomplete) - enhanced_attr.pop("bbox", None) - enhanced_attr.pop("page", None) - - return enhanced_attr - - def _extract_geometry_from_assessment( - self, assessment_data: dict[str, Any] - ) -> dict[str, Any]: - """ - Extract geometry data from assessment response and convert to proper format. - Now supports recursive processing of nested group attributes. 
- - Args: - assessment_data: Dictionary containing assessment results from LLM - - Returns: - Enhanced assessment data with geometry information converted to proper format - """ - enhanced_assessment = {} - - for attr_name, attr_assessment in assessment_data.items(): - if isinstance(attr_assessment, dict): - # Check if this is a direct confidence assessment - if "confidence" in attr_assessment: - # This is a direct assessment - process its geometry - enhanced_assessment[attr_name] = ( - self._process_single_assessment_geometry( - attr_assessment, attr_name - ) - ) - else: - # This is a group attribute (no direct confidence) - recursively process nested attributes - logger.debug(f"Processing group attribute: {attr_name}") - enhanced_assessment[attr_name] = ( - self._extract_geometry_from_assessment(attr_assessment) - ) - - elif isinstance(attr_assessment, list): - # Handle list attributes - process each item recursively - enhanced_list = [] - for i, item_assessment in enumerate(attr_assessment): - if isinstance(item_assessment, dict): - # Recursively process each list item - enhanced_item = self._extract_geometry_from_assessment( - item_assessment - ) - enhanced_list.append(enhanced_item) - else: - # Non-dict items pass through unchanged - enhanced_list.append(item_assessment) - enhanced_assessment[attr_name] = enhanced_list - else: - # Other types pass through unchanged - enhanced_assessment[attr_name] = attr_assessment - - return enhanced_assessment + # Geometry processing uses shared utilities from geometry_utils module def process_document_section(self, document: Document, section_id: str) -> Document: """ diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index 07c8d2cd..f700e2df 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -100,11 +100,156 @@ class AssessmentResult(BaseModel): # ============================================================================ +class BoundingBoxCoordinates(BaseModel): + """Normalized bounding box coordinates (0-1 scale).""" + + top: float = Field(..., ge=0.0, le=1.0, description="Top coordinate (normalized)") + left: float = Field(..., ge=0.0, le=1.0, description="Left coordinate (normalized)") + width: float = Field(..., ge=0.0, le=1.0, description="Width (normalized)") + height: float = Field(..., ge=0.0, le=1.0, description="Height (normalized)") + + @classmethod + def from_corners( + cls, x1: float, y1: float, x2: float, y2: float, scale: float = 1000.0 + ) -> "BoundingBoxCoordinates": + """ + Create from corner coordinates. + + Args: + x1, y1: Top-left corner in 0-scale range + x2, y2: Bottom-right corner in 0-scale range + scale: Normalization scale (default 1000.0) + + Returns: + BoundingBoxCoordinates with normalized 0-1 values + """ + # Ensure coordinates are in correct order + x1, x2 = min(x1, x2), max(x1, x2) + y1, y2 = min(y1, y2), max(y1, y2) + + # Normalize to 0-1 scale + left = x1 / scale + top = y1 / scale + width = (x2 - x1) / scale + height = (y2 - y1) / scale + + return cls(top=top, left=left, width=width, height=height) + + class Geometry(BaseModel): - """Geometry in IDP format (converted from BoundingBox).""" + """ + Standard IDP geometry format compatible with UI expectations. - boundingBox: dict[str, float] # {top, left, width, height} - page: int + This is the single source of truth for geometry data structure. 
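+    Coordinates are stored normalized to 0-1; from_bbox_list accepts the LLM's
+    0-1000 [x1, y1, x2, y2] output, so e.g. (illustrative) [100, 200, 300, 400]
+    on page 1 becomes left=0.1, top=0.2, width=0.2, height=0.2.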
+ Frontend expects: geometry[0].boundingBox.{left, top, width, height} + """ + + boundingBox: BoundingBoxCoordinates = Field( + ..., + description="Normalized bounding box coordinates", + serialization_alias="boundingBox", # Ensure lowercase in JSON + ) + page: int = Field(..., ge=1, description="Page number (1-indexed)") + vertices: list[dict[str, float]] | None = Field( + None, description="Optional polygon vertices for complex shapes" + ) + + @classmethod + def from_bbox_list(cls, bbox_coords: list[float], page_num: int) -> "Geometry": + """ + Create from LLM bbox response format [x1, y1, x2, y2]. + + Args: + bbox_coords: List of 4 coordinates in 0-1000 scale + page_num: Page number (1-indexed) + + Returns: + Geometry object + + Raises: + ValueError: If bbox_coords is not exactly 4 values + """ + if len(bbox_coords) != 4: + raise ValueError(f"Expected 4 coordinates, got {len(bbox_coords)}") + + x1, y1, x2, y2 = bbox_coords + bbox = BoundingBoxCoordinates.from_corners(x1, y1, x2, y2, scale=1000.0) + + return cls(boundingBox=bbox, page=page_num, vertices=None) + + def to_ui_format(self) -> dict[str, Any]: + """ + Convert to UI-compatible format. + + Returns: + Dict with geometry data: {"boundingBox": {...}, "page": 1} + """ + result: dict[str, Any] = { + "boundingBox": { + "top": self.boundingBox.top, + "left": self.boundingBox.left, + "width": self.boundingBox.width, + "height": self.boundingBox.height, + }, + "page": self.page, + } + if self.vertices is not None: + result["vertices"] = self.vertices + return result + + +class FieldAssessmentData(BaseModel): + """ + Standard assessment data for a single field. + Ensures consistent structure across all assessment services. + """ + + confidence: float = Field(..., ge=0.0, le=1.0) + value: Any = Field(None, description="The extracted value") + reasoning: str = Field(..., description="Confidence reasoning") + confidence_threshold: float = Field(..., ge=0.0, le=1.0) + geometry: list[Geometry] | None = Field( + None, + description="Bounding box locations (always wrapped in list for UI compatibility)", + ) + + @classmethod + def from_llm_response( + cls, + confidence: float, + value: Any, + reasoning: str, + confidence_threshold: float, + bbox_coords: list[float] | None = None, + page_num: int | None = None, + ) -> "FieldAssessmentData": + """Create from LLM response data.""" + geometry = None + if bbox_coords is not None and page_num is not None: + geom = Geometry.from_bbox_list(bbox_coords, page_num) + geometry = [geom] # Always wrap in list + + return cls( + confidence=confidence, + value=value, + reasoning=reasoning, + confidence_threshold=confidence_threshold, + geometry=geometry, + ) + + def to_explainability_format(self) -> dict[str, Any]: + """Convert to explainability_info format for frontend.""" + result = { + "confidence": self.confidence, + "value": self.value, + "reasoning": self.reasoning, + "confidence_threshold": self.confidence_threshold, + } + + if self.geometry: + result["geometry"] = [g.to_ui_format() for g in self.geometry] + + return result class ConfidenceAlert(BaseModel): diff --git a/lib/idp_common_pkg/idp_common/assessment/service.py b/lib/idp_common_pkg/idp_common/assessment/service.py index 5e7f0973..de010bdb 100644 --- a/lib/idp_common_pkg/idp_common/assessment/service.py +++ b/lib/idp_common_pkg/idp_common/assessment/service.py @@ -20,6 +20,7 @@ from typing import Any from idp_common import bedrock, image, metrics, s3, utils +from idp_common.assessment.geometry_utils import extract_geometry_from_nested_dict from 
idp_common.assessment.models import ( ConfidenceAlert, DocumentContent, @@ -621,152 +622,9 @@ def _get_text_confidence_data(self, page) -> str: return "" # ============================================================================ - # GEOMETRY PROCESSING + # GEOMETRY PROCESSING (uses shared utilities from geometry_utils) # ============================================================================ - def _convert_bbox_to_geometry( - self, bbox_coords: list[float], page_num: int - ) -> dict[str, Any]: - """ - Convert [x1,y1,x2,y2] coordinates to geometry format. - - Args: - bbox_coords: List of 4 coordinates [x1, y1, x2, y2] in 0-1000 scale - page_num: Page number where the bounding box appears - - Returns: - Dictionary in geometry format compatible with pattern-1 UI - """ - if len(bbox_coords) != 4: - raise ValueError(f"Expected 4 coordinates, got {len(bbox_coords)}") - - x1, y1, x2, y2 = bbox_coords - - # Ensure coordinates are in correct order - x1, x2 = min(x1, x2), max(x1, x2) - y1, y2 = min(y1, y2), max(y1, y2) - - # Convert from normalized 0-1000 scale to 0-1 - left = x1 / 1000.0 - top = y1 / 1000.0 - width = (x2 - x1) / 1000.0 - height = (y2 - y1) / 1000.0 - - return { - "boundingBox": {"top": top, "left": left, "width": width, "height": height}, - "page": page_num, - } - - def _process_single_assessment_geometry( - self, attr_assessment: dict[str, Any], attr_name: str = "" - ) -> dict[str, Any]: - """ - Process geometry data for a single assessment (with confidence key). - - Args: - attr_assessment: Single assessment dictionary with confidence data - attr_name: Name of attribute for logging - - Returns: - Enhanced assessment with geometry converted to proper format - """ - enhanced_attr = attr_assessment.copy() - - # Check if this assessment includes bbox data - if "bbox" in attr_assessment or "page" in attr_assessment: - # Both bbox and page are required for valid geometry - if "bbox" in attr_assessment and "page" in attr_assessment: - try: - bbox_coords = attr_assessment["bbox"] - page_num = attr_assessment["page"] - - # Validate bbox coordinates - if isinstance(bbox_coords, list) and len(bbox_coords) == 4: - # Convert to geometry format - geometry = self._convert_bbox_to_geometry(bbox_coords, page_num) - enhanced_attr["geometry"] = [geometry] - - logger.debug( - f"Converted bounding box for {attr_name}: {bbox_coords} -> geometry format" - ) - else: - logger.warning( - f"Invalid bounding box format for {attr_name}: {bbox_coords}" - ) - - except Exception as e: - logger.warning( - f"Failed to process bounding box for {attr_name}: {str(e)}" - ) - else: - # If only one of bbox/page exists, log a warning about incomplete data - if "bbox" in attr_assessment and "page" not in attr_assessment: - logger.warning( - f"Found bbox without page for {attr_name} - removing incomplete bbox data" - ) - elif "page" in attr_assessment and "bbox" not in attr_assessment: - logger.warning( - f"Found page without bbox for {attr_name} - removing incomplete page data" - ) - - # Always remove raw bbox/page data from output (whether processed or incomplete) - enhanced_attr.pop("bbox", None) - enhanced_attr.pop("page", None) - - return enhanced_attr - - def _extract_geometry_from_assessment( - self, assessment_data: dict[str, Any] - ) -> dict[str, Any]: - """ - Extract geometry data from assessment response and convert to proper format. - Now supports recursive processing of nested group attributes. 
- - Args: - assessment_data: Dictionary containing assessment results from LLM - - Returns: - Enhanced assessment data with geometry information converted to proper format - """ - enhanced_assessment = {} - - for attr_name, attr_assessment in assessment_data.items(): - if isinstance(attr_assessment, dict): - # Check if this is a direct confidence assessment - if "confidence" in attr_assessment: - # This is a direct assessment - process its geometry - enhanced_assessment[attr_name] = ( - self._process_single_assessment_geometry( - attr_assessment, attr_name - ) - ) - else: - # This is a group attribute (no direct confidence) - recursively process nested attributes - logger.debug(f"Processing group attribute: {attr_name}") - enhanced_assessment[attr_name] = ( - self._extract_geometry_from_assessment(attr_assessment) - ) - - elif isinstance(attr_assessment, list): - # Handle list attributes - process each item recursively - enhanced_list = [] - for i, item_assessment in enumerate(attr_assessment): - if isinstance(item_assessment, dict): - # Recursively process each list item - enhanced_item = self._extract_geometry_from_assessment( - item_assessment - ) - enhanced_list.append(enhanced_item) - else: - # Non-dict items pass through unchanged - enhanced_list.append(item_assessment) - enhanced_assessment[attr_name] = enhanced_list - else: - # Other types pass through unchanged - enhanced_assessment[attr_name] = attr_assessment - - return enhanced_assessment - # ============================================================================ # RESULT PROCESSING # ============================================================================ @@ -811,7 +669,7 @@ def _process_assessment_response( # Process bounding boxes automatically if bbox data is present try: logger.debug("Checking for bounding box data in assessment response") - assessment_data = self._extract_geometry_from_assessment(assessment_data) + assessment_data = extract_geometry_from_nested_dict(assessment_data) except Exception as e: logger.warning(f"Failed to extract geometry data: {str(e)}") diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_models.py b/lib/idp_common_pkg/idp_common/assessment/strands_models.py index 80cc261e..ba04ac65 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_models.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_models.py @@ -9,6 +9,8 @@ from pydantic import BaseModel, Field +from idp_common.assessment.models import Geometry + class BoundingBox(BaseModel): """Bounding box coordinates in normalized 0-1000 scale.""" @@ -21,20 +23,18 @@ class BoundingBox(BaseModel): def to_geometry(self) -> dict[str, Any]: """ - Convert to IDP geometry format. + Convert to IDP geometry format compatible with UI. 
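+        Delegates to Geometry.from_bbox_list, so the Strands path shares the
+        same 0-1000 -> 0-1 conversion as the shared geometry utilities.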
Returns: - Dictionary with BoundingBox and Page in IDP format + Dictionary in UI-compatible format (lowercase, no array wrapper here) """ - return { - "BoundingBox": { - "Width": (self.x2 - self.x1) / 1000.0, - "Height": (self.y2 - self.y1) / 1000.0, - "Left": self.x1 / 1000.0, - "Top": self.y1 / 1000.0, - }, - "Page": self.page, - } + # Create proper Geometry object + geometry = Geometry.from_bbox_list( + [self.x1, self.y1, self.x2, self.y2], self.page + ) + + # Return UI format (will be wrapped in array by caller) + return geometry.to_ui_format() class ConfidenceAssessment(BaseModel): @@ -43,18 +43,10 @@ class ConfidenceAssessment(BaseModel): value: Any = Field(..., description="The extracted value") confidence: float = Field(..., ge=0.0, le=1.0, description="Confidence score 0-1") reasoning: str = Field(..., description="Explanation for the confidence score") - threshold: float = Field( - ..., ge=0.0, le=1.0, description="Required confidence threshold" - ) bounding_box: BoundingBox | None = Field( None, description="Location of value in document" ) - @property - def meets_threshold(self) -> bool: - """Computed field: whether confidence meets threshold.""" - return self.confidence >= self.threshold - class AssessmentOutput(BaseModel): """ @@ -71,6 +63,3 @@ class AssessmentOutput(BaseModel): assessment: ConfidenceAssessment = Field( ..., description="Confidence assessment for this specific field" ) - alerts: list[str] = Field( - default_factory=list, description="Any confidence threshold alerts or issues" - ) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index f4963261..f05aae12 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -17,7 +17,11 @@ from strands.types.content import CachePoint, ContentBlock, Message from strands.types.media import ImageContent, ImageSource -from idp_common.assessment.models import AssessmentResult, AssessmentTask +from idp_common.assessment.models import ( + AssessmentResult, + AssessmentTask, + FieldAssessmentData, +) from idp_common.assessment.strands_models import AssessmentOutput from idp_common.assessment.strands_tools import create_strands_tools from idp_common.bedrock import build_model_config @@ -401,33 +405,41 @@ def _convert_to_assessment_result( metering: dict[str, Any], processing_time: float, ) -> AssessmentResult: - """Convert Strands AssessmentOutput to AssessmentResult.""" - + """Convert Strands AssessmentOutput to AssessmentResult with standardized geometry format.""" # Single field assessment field_name = output.field_name assessment = output.assessment - # Build assessment data with confidence score - assessment_data = { - field_name: { - "confidence": assessment.confidence, - "value": assessment.value, - "reasoning": assessment.reasoning, - } - } + # Create standardized field assessment data + field_data = FieldAssessmentData.from_llm_response( + confidence=assessment.confidence, + value=assessment.value, + reasoning=assessment.reasoning, + confidence_threshold=task.confidence_threshold, + bbox_coords=( + [ + assessment.bounding_box.x1, + assessment.bounding_box.y1, + assessment.bounding_box.x2, + assessment.bounding_box.y2, + ] + if assessment.bounding_box + else None + ), + page_num=assessment.bounding_box.page if assessment.bounding_box else None, + ) - # Add geometry if bounding box provided - if assessment.bounding_box: - 
assessment_data[field_name]["Geometry"] = assessment.bounding_box.to_geometry() + # Convert to explainability format + assessment_data = {field_name: field_data.to_explainability_format()} # Check for confidence threshold violations confidence_alerts = [] - if not assessment.meets_threshold: + if assessment.confidence < task.confidence_threshold: confidence_alerts.append( { "attribute_name": field_name, "confidence": assessment.confidence, - "confidence_threshold": assessment.threshold, + "confidence_threshold": task.confidence_threshold, } ) From cedf899550e494f73683cfa2e69e5822f84fef6b Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 17:22:02 +0000 Subject: [PATCH 19/30] memory update --- patterns/pattern-2/template.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/patterns/pattern-2/template.yaml b/patterns/pattern-2/template.yaml index 6605c8af..43bd97e6 100644 --- a/patterns/pattern-2/template.yaml +++ b/patterns/pattern-2/template.yaml @@ -1820,7 +1820,7 @@ Resources: Command: - "index.handler" Timeout: 900 - MemorySize: 512 + MemorySize: 4096 Tracing: Active Environment: Variables: @@ -1946,7 +1946,7 @@ Resources: Command: - "index.handler" Timeout: 900 - MemorySize: 512 + MemorySize: 4096 Tracing: Active Environment: Variables: From a92f70ea5d1941cb8c791b751c66b0c227273ff2 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 22:40:02 +0000 Subject: [PATCH 20/30] cleanup --- .../idp_common/assessment/example_usage.py | 9 +- .../idp_common/assessment/granular_service.py | 89 ++- .../idp_common/assessment/models.py | 4 - .../idp_common/assessment/strands_service.py | 13 +- .../idp_common/assessment/strands_tools.py | 16 - .../idp_common/image/__init__.py | 2 +- .../idp_common/utils/pdf_helpers.py | 4 +- .../test_granular_assessment_full.py | 684 ++++++++++++++++++ .../tests/unit/assessment/conftest.py | 1 + .../test_aggregate_assessment_results.py | 430 +++++++++++ .../unit/config/test_configuration_sync.py | 4 +- .../tests/unit/test_granular_assessment.py | 9 +- lib/idp_common_pkg/verify_stickler.py | 2 +- 13 files changed, 1187 insertions(+), 80 deletions(-) create mode 100755 lib/idp_common_pkg/test_granular_assessment_full.py create mode 100644 lib/idp_common_pkg/tests/unit/assessment/test_aggregate_assessment_results.py diff --git a/lib/idp_common_pkg/idp_common/assessment/example_usage.py b/lib/idp_common_pkg/idp_common/assessment/example_usage.py index 4cbe36f0..ebc8c0d0 100644 --- a/lib/idp_common_pkg/idp_common/assessment/example_usage.py +++ b/lib/idp_common_pkg/idp_common/assessment/example_usage.py @@ -142,14 +142,15 @@ def example_granular_assessment(): # Demonstrate task creation (this would normally be done internally) if hasattr(assessment_service, "_create_assessment_tasks"): - attributes = assessment_service._get_class_attributes("Bank Statement") - tasks = assessment_service._create_assessment_tasks( - example_extraction_results, attributes, 0.9 + class_schema = assessment_service._get_class_schema("Bank Statement") + properties = class_schema.get("properties", {}) + tasks, assessment_structure = assessment_service._create_assessment_tasks( + example_extraction_results, properties, 0.9 ) logger.info(f"\nCreated {len(tasks)} assessment tasks:") for task in tasks: - logger.info(f" - {task.task_id}: {task.task_type} for {task.attributes}") + logger.info(f" - {task.task_id}: {task.task_type} for {task.field_name}") return assessment_service, config diff --git 
a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 9a5fd70c..acb338a4 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -309,7 +309,6 @@ def _traverse( field_name=_convert_field_path_to_string(item_path), field_schema=items_schema, confidence_threshold=threshold, - parent_assessment_dict=assessment_list, # type: ignore ) tasks.append(task) task_counter[0] += 1 @@ -332,7 +331,6 @@ def _traverse( field_name=_convert_field_path_to_string(field_path), field_schema=prop_schema, confidence_threshold=threshold, - parent_assessment_dict=parent_dict, ) tasks.append(task) task_counter[0] += 1 @@ -536,6 +534,32 @@ def _is_throttling_exception(self, exception: Exception) -> bool: for throttle_term in self.throttling_exceptions ) + def _insert_at_field_path( + self, + structure: dict[str, Any], + field_path: tuple[str | int, ...], + value: Any, + ) -> None: + """ + Navigate through structure using field_path and insert value at the end. + + Args: + structure: The assessment structure to navigate + field_path: Tuple path like ("Account Holder Address", "City") or ("Transactions", 0, "Amount") + value: The assessment data to insert + + Example: + field_path = ("Account Holder Address", "City") + -> structure["Account Holder Address"]["City"] = value + + field_path = ("Transactions", 0, "Amount") + -> structure["Transactions"][0]["Amount"] = value + """ + parent = structure + for key in field_path[:-1]: + parent = parent[key] + parent[field_path[-1]] = value + def _aggregate_assessment_results( self, tasks: list[AssessmentTask], @@ -543,7 +567,7 @@ def _aggregate_assessment_results( assessment_structure: dict[str, Any], ) -> tuple[dict[str, Any], list[dict[str, Any]], dict[str, Any]]: """ - Aggregate individual task results into assessment structure using direct parent insertion. + Aggregate individual task results into assessment structure using field_path navigation. 
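+        Results are matched back to tasks by task_id; each successful task's flat
+        assessment dict gets confidence_threshold added (if missing) and is then
+        inserted at task.field_path via _insert_at_field_path.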
Args: tasks: list of assessment tasks @@ -553,13 +577,12 @@ def _aggregate_assessment_results( Returns: Tuple of (assessment_structure, confidence_alerts, aggregated_metering) """ - all_confidence_alerts = [] - aggregated_metering = {} + all_confidence_alerts: list[dict[str, Any]] = [] + aggregated_metering: dict[str, Any] = {} # Create a mapping from task_id to result result_map = {result.task_id: result for result in results} - # Process each task result - direct O(1) insertion using parent reference for task in tasks: result = result_map.get(task.task_id) if not result or not result.success: @@ -575,45 +598,31 @@ def _aggregate_assessment_results( # Add confidence alerts all_confidence_alerts.extend(result.confidence_alerts) - # Get assessment data from result - should be a single assessment object - # The Strands agent returns the assessment in result.assessment_data - assessment_obj = result.assessment_data + # Get assessment data directly from result + # strands_service returns flat assessment dict: {confidence, value, reasoning, ...} + field_assessment = result.assessment_data - if not isinstance(assessment_obj, dict): + if not isinstance(field_assessment, dict): logger.warning( - f"Task {task.task_id}: expected dict assessment, got {type(assessment_obj)}" + f"Task {task.task_id}: expected dict assessment, got {type(field_assessment)}" ) continue - # Add confidence_threshold to the assessment object - assessment_obj["confidence_threshold"] = task.confidence_threshold - - # Direct insertion using parent reference - O(1) operation! - parent = task.parent_assessment_dict - field_name = task.field_name - - if isinstance(parent, dict): - # Regular field - insert into parent dict - parent[field_name] = assessment_obj - elif isinstance(parent, list): - # Array item - get index from field_path - # field_path is like ("items", 0, "price") - second-to-last is the index - if len(task.field_path) >= 2 and isinstance(task.field_path[-2], int): - idx = task.field_path[-2] - # Replace the None placeholder we created during structure building - if idx < len(parent): - parent[idx] = assessment_obj - else: - logger.warning( - f"Task {task.task_id}: index {idx} out of range for list of length {len(parent)}" - ) - else: - logger.warning( - f"Task {task.task_id}: cannot determine array index from path {task.field_path}" - ) - else: - logger.warning( - f"Task {task.task_id}: unexpected parent type {type(parent)}" + # Add confidence_threshold if not already present + if "confidence_threshold" not in field_assessment: + field_assessment["confidence_threshold"] = task.confidence_threshold + + # Insert directly at field_path - no unwrapping needed + try: + self._insert_at_field_path( + assessment_structure, task.field_path, field_assessment + ) + logger.debug( + f"Task {task.task_id}: Inserted assessment at {task.field_path}" + ) + except (KeyError, IndexError, TypeError) as e: + logger.error( + f"Task {task.task_id}: Failed to insert at path {task.field_path}: {e}" ) return assessment_structure, all_confidence_alerts, aggregated_metering diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index f700e2df..11b0adb8 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -78,10 +78,6 @@ class AssessmentTask(BaseModel): # Confidence threshold for this field confidence_threshold: float - # Direct reference to parent container in assessment structure (for O(1) insertion) - # Can be 
Dict for regular fields or list for array items - parent_assessment_dict: dict[str, Any] | list[Any] - class AssessmentResult(BaseModel): """Result of a single assessment task (used by both granular and strands services).""" diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index f05aae12..e6f997ee 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -405,8 +405,12 @@ def _convert_to_assessment_result( metering: dict[str, Any], processing_time: float, ) -> AssessmentResult: - """Convert Strands AssessmentOutput to AssessmentResult with standardized geometry format.""" - # Single field assessment + """ + Convert Strands AssessmentOutput to AssessmentResult with standardized geometry format. + + The assessment_data is returned as a flat dict (not wrapped by field name) because + the aggregation step uses task.field_path for insertion into the final structure. + """ field_name = output.field_name assessment = output.assessment @@ -429,8 +433,9 @@ def _convert_to_assessment_result( page_num=assessment.bounding_box.page if assessment.bounding_box else None, ) - # Convert to explainability format - assessment_data = {field_name: field_data.to_explainability_format()} + # Return assessment data directly (not wrapped by field name) + # The aggregation step uses task.field_path for proper insertion + assessment_data = field_data.to_explainability_format() # Check for confidence threshold violations confidence_alerts = [] diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index ca5fcdeb..de79e59b 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -37,22 +37,6 @@ class ViewImageInput(BaseModel): @tool def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: - """ - Submit your final confidence assessment. - - Use this tool when you have: - 1. Located the values in the document images - 2. Determined precise bounding box coordinates using ruler markings - 3. 
Assessed the confidence based on clarity and accuracy - - Args: - assessment: Dictionary with: - - assessments: dict mapping attribute names to ConfidenceAssessment - - alerts: list of any threshold alerts (optional) - - Returns: - Success confirmation message or validation error details - """ # Validate assessment structure and return helpful errors validated_assessment = AssessmentOutput.model_validate(assessment) diff --git a/lib/idp_common_pkg/idp_common/image/__init__.py b/lib/idp_common_pkg/idp_common/image/__init__.py index eb43f726..09fb21f4 100644 --- a/lib/idp_common_pkg/idp_common/image/__init__.py +++ b/lib/idp_common_pkg/idp_common/image/__init__.py @@ -79,7 +79,7 @@ def resize_image( logger.info( f"Resizing image from {current_width}x{current_height} to {new_width}x{new_height} (scale: {scale_factor:.3f})" ) - image = image.resize((new_width, new_height), Image.LANCZOS) + image = image.resize((new_width, new_height), Image.Resampling.LANCZOS) # Save in original format if possible img_byte_array = io.BytesIO() diff --git a/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py b/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py index c102ee52..440a1b2c 100644 --- a/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py +++ b/lib/idp_common_pkg/idp_common/utils/pdf_helpers.py @@ -75,7 +75,7 @@ def pdf_page_to_image( # Render page to pixmap mat = fitz.Matrix(dpi_scale, dpi_scale) - pix = page.get_pixmap(matrix=mat) + pix = page.get_pixmap(matrix=mat) # pyright: ignore[reportAttributeAccessIssue] # Convert to PNG bytes png_bytes = pix.tobytes("png") @@ -144,7 +144,7 @@ def pdf_to_images( # Render page to pixmap mat = fitz.Matrix(dpi_scale, dpi_scale) - pix = page.get_pixmap(matrix=mat) + pix = page.get_pixmap(matrix=mat) # pyright: ignore[reportAttributeAccessIssue] # Convert to PNG bytes png_bytes = pix.tobytes("png") diff --git a/lib/idp_common_pkg/test_granular_assessment_full.py b/lib/idp_common_pkg/test_granular_assessment_full.py new file mode 100755 index 00000000..d3a3c8ce --- /dev/null +++ b/lib/idp_common_pkg/test_granular_assessment_full.py @@ -0,0 +1,684 @@ +#!/usr/bin/env python3 +# pyright: reportAttributeAccessIssue=false +# ruff: noqa +""" +Full integration test for granular assessment service with mocked AWS services. + +This script: +1. Uses moto's mock_aws() to mock S3 and other AWS services +2. Configures passthrough for Bedrock API calls (to use real AWS Bedrock) +3. Creates a complete test document with pages and images +4. Uploads test data to mocked S3 +5. Runs the full assessment pipeline with REAL Bedrock calls +6. Validates the results + +Usage: + python test_granular_assessment_full.py [--max-workers N] + +Requirements: + - Valid AWS credentials for Bedrock access (calls go to real Bedrock!) + - Bedrock model access: us.anthropic.claude-3-5-sonnet-20241022-v2:0 + - pip install moto[all] boto3 PyMuPDF pillow + +Note: + This test uses moto's passthrough feature to allow Bedrock calls + while still mocking S3/DynamoDB. You will incur real Bedrock costs! 
+ + Uses real sample document from samples/bank-statement-multipage.pdf +""" + +import argparse +import json +from pathlib import Path +from typing import Any + +import boto3 +from idp_common.assessment.granular_service import GranularAssessmentService +from idp_common.config.models import AssessmentConfig, IDPConfig +from idp_common.models import Document, Page, Section +from idp_common.utils.pdf_helpers import create_minimal_png, pdf_page_to_image +from moto import mock_aws + +# Test configuration +TEST_BUCKET = "test-idp-bucket" +TEST_REGION = "us-east-1" + + +def load_sample_document_image() -> bytes: + """Load a real sample document from the samples folder and convert first page to PNG.""" + # Use the bank statement sample - it's a real invoice-like document + sample_path = ( + Path(__file__).parent.parent.parent / "samples" / "bank-statement-multipage.pdf" + ) + + if not sample_path.exists(): + print(f"Warning: Sample document not found at {sample_path}") + print("Falling back to minimal test image") + return create_minimal_png() + + try: + # Convert first page of PDF to image with size limits + # Max 1200x1200 pixels for ~1MP (~100-200KB depending on content) + png_bytes = pdf_page_to_image( + pdf_path=sample_path, + page_number=0, # First page + max_width=1200, + max_height=1200, + dpi_scale=1.0, # Standard DPI + ) + + print( + f"āœ“ Loaded real document image from {sample_path.name} ({len(png_bytes):,} bytes)" + ) + return png_bytes + + except Exception as e: + print(f"Warning: Failed to load sample document: {e}") + print("Falling back to minimal test image") + return create_minimal_png() + + +def create_sample_extraction_result() -> dict[str, Any]: + """Create sample extraction results for a bank statement.""" + return { + "Account Number": "1234567890", + "Statement Period": "January 2024", + "Account Holder Address": { + "Street Number": "123", + "Street Name": "Main St", + "City": "San Francisco", + "State": "CA", + "ZIP Code": "94102", + }, + "Transactions": [ + { + "Date": "01/05/2024", + "Description": "Direct Deposit - Acme Corp", + "Amount": 2500.00, + }, + { + "Date": "01/10/2024", + "Description": "ATM Withdrawal", + "Amount": -200.00, + }, + { + "Date": "01/15/2024", + "Description": "Online Payment - Electric Co", + "Amount": -150.00, + }, + ], + } + + +def create_sample_schema() -> dict[str, Any]: + """Create a sample JSON schema for bank statement documents.""" + return { + "type": "object", + "x-aws-idp-document-type": "Bank Statement", + "properties": { + "Account Number": { + "type": "string", + "description": "Primary account identifier", + "x-aws-idp-confidence-threshold": 0.95, + }, + "Statement Period": { + "type": "string", + "description": "Statement period (e.g., January 2024)", + "x-aws-idp-confidence-threshold": 0.90, + }, + "Account Holder Address": { + "type": "object", + "description": "Complete address information for the account holder", + "x-aws-idp-confidence-threshold": 0.85, + "properties": { + "Street Number": { + "type": "string", + "description": "House or building number", + "x-aws-idp-confidence-threshold": 0.90, + }, + "Street Name": { + "type": "string", + "description": "Name of the street", + "x-aws-idp-confidence-threshold": 0.80, + }, + "City": { + "type": "string", + "description": "City name", + "x-aws-idp-confidence-threshold": 0.90, + }, + "State": { + "type": "string", + "description": "State abbreviation (e.g., CA, NY)", + "x-aws-idp-confidence-threshold": 0.90, + }, + "ZIP Code": { + "type": "string", + "description": "5 or 9 
digit postal code", + "x-aws-idp-confidence-threshold": 0.90, + }, + }, + }, + "Transactions": { + "type": "array", + "description": "List of all transactions in the statement period", + "items": { + "type": "object", + "properties": { + "Date": { + "type": "string", + "format": "date", + "description": "Transaction date (MM/DD/YYYY)", + "x-aws-idp-confidence-threshold": 0.90, + }, + "Description": { + "type": "string", + "description": "Transaction description or merchant name", + "x-aws-idp-confidence-threshold": 0.70, + }, + "Amount": { + "type": "number", + "description": "Transaction amount (positive for deposits, negative for withdrawals)", + "x-aws-idp-confidence-threshold": 0.95, + }, + }, + }, + }, + }, + } + + +def create_parsed_text() -> str: + """Create sample parsed text.""" + return """BANK STATEMENT + +Account Number: 1234567890 +Statement Period: January 2024 + +Account Holder Address: +123 Main St +San Francisco, CA 94102 + +Transaction History: +Date Description Amount +01/05/2024 Direct Deposit - Acme Corp $2,500.00 +01/10/2024 ATM Withdrawal -$200.00 +01/15/2024 Online Payment - Electric Co -$150.00 + +Ending Balance: $2,150.00 +""" + + +def create_raw_ocr_data() -> dict[str, Any]: + """Create sample raw OCR data.""" + return { + "Blocks": [ + { + "BlockType": "LINE", + "Text": "BANK STATEMENT", + "Confidence": 99.5, + "Geometry": { + "BoundingBox": { + "Left": 0.4, + "Top": 0.1, + "Width": 0.2, + "Height": 0.05, + } + }, + }, + { + "BlockType": "LINE", + "Text": "Account Number: 1234567890", + "Confidence": 98.9, + "Geometry": { + "BoundingBox": { + "Left": 0.35, + "Top": 0.15, + "Width": 0.3, + "Height": 0.04, + } + }, + }, + { + "BlockType": "LINE", + "Text": "123 Main St", + "Confidence": 98.2, + "Geometry": { + "BoundingBox": { + "Left": 0.3, + "Top": 0.25, + "Width": 0.2, + "Height": 0.03, + } + }, + }, + ] + } + + +def create_text_confidence_data() -> dict[str, Any]: + """Create sample text confidence data.""" + return { + "text_blocks": [ + { + "text": "BANK STATEMENT", + "confidence": 0.995, + "bbox": [400, 100, 600, 150], + "page": 1, + }, + { + "text": "Account Number: 1234567890", + "confidence": 0.989, + "bbox": [350, 150, 650, 190], + "page": 1, + }, + { + "text": "123 Main St", + "confidence": 0.985, + "bbox": [300, 250, 500, 280], + "page": 1, + }, + { + "text": "$2,500.00", + "confidence": 0.975, + "bbox": [600, 400, 750, 430], + "page": 1, + }, + ] + } + + +def setup_s3_test_data( + s3_client, bucket: str, doc_id: str +) -> tuple[Document, dict[str, Any]]: + """ + Set up test data in mocked S3 bucket. 
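+    Uploads the page image, parsed text, text-confidence JSON and the extraction
+    result under documents/{doc_id}/ in the mocked bucket, then builds a Document
+    whose page and section URIs point at those objects.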
+ + Returns: + Tuple of (Document, extraction_result) + """ + print("šŸ“¦ Setting up S3 test data...") + + # Create bucket + s3_client.create_bucket(Bucket=bucket) + print(f" Created bucket: {bucket}") + + # Upload page image + image_key = f"documents/{doc_id}/pages/page-1.png" + image_bytes = load_sample_document_image() + s3_client.put_object( + Bucket=bucket, Key=image_key, Body=image_bytes, ContentType="image/png" + ) + print( + f" āœ“ Uploaded image: s3://{bucket}/{image_key} ({len(image_bytes):,} bytes)" + ) + + # Upload parsed text + text_key = f"documents/{doc_id}/pages/page-1.txt" + s3_client.put_object( + Bucket=bucket, + Key=text_key, + Body=create_parsed_text().encode("utf-8"), + ContentType="text/plain", + ) + print(f" āœ“ Uploaded text: s3://{bucket}/{text_key}") + + # Upload raw OCR data + # raw_ocr_key = f"documents/{doc_id}/pages/page-1-raw.json" + # s3_client.put_object( + # Bucket=bucket, + # Key=raw_ocr_key, + # Body=json.dumps(create_raw_ocr_data()).encode("utf-8"), + # ContentType="application/json", + # ) + # print(f" āœ“ Uploaded raw OCR: s3://{bucket}/{raw_ocr_key}") + + # Upload text confidence data + confidence_key = f"documents/{doc_id}/pages/page-1-confidence.json" + s3_client.put_object( + Bucket=bucket, + Key=confidence_key, + Body=json.dumps(create_text_confidence_data()).encode("utf-8"), + ContentType="application/json", + ) + print(f" āœ“ Uploaded confidence: s3://{bucket}/{confidence_key}") + + # Create and upload extraction results + extraction_result = create_sample_extraction_result() + extraction_data = { + "document_class": {"type": "Bank Statement"}, + "split_document": {"page_indices": [1]}, + "inference_result": extraction_result, + "metadata": {"extraction_time_seconds": 2.5}, + "explainability_info": [], + } + + extraction_key = f"documents/{doc_id}/extraction/result.json" + s3_client.put_object( + Bucket=bucket, + Key=extraction_key, + Body=json.dumps(extraction_data).encode("utf-8"), + ContentType="application/json", + ) + print(f" āœ“ Uploaded extraction: s3://{bucket}/{extraction_key}") + + # Create Document object + doc = Document( + id=doc_id, + workflow_execution_arn=f"arn:aws:states:{TEST_REGION}:123456789012:execution:test-workflow:test-exec-001", + pages={}, + sections=[], + ) + + # Add page + page = Page( + page_id="1", + image_uri=f"s3://{bucket}/{image_key}", + parsed_text_uri=f"s3://{bucket}/{text_key}", + # raw_text_uri=f"s3://{bucket}/{raw_ocr_key}", + text_confidence_uri=f"s3://{bucket}/{confidence_key}", + ) + doc.pages["1"] = page + + # Add section + section = Section( + section_id="section-001", + classification="Bank Statement", + page_ids=["1"], + extraction_result_uri=f"s3://{bucket}/{extraction_key}", + ) + doc.sections.append(section) + + print(" āœ“ Created Document object with 1 page, 1 section") + print() + + return doc, extraction_result + + +def create_test_config(schema: dict[str, Any], max_workers: int = 2) -> IDPConfig: + """Create test configuration.""" + return IDPConfig( + classes=[schema], + assessment=AssessmentConfig( + enabled=True, + model="us.anthropic.claude-sonnet-4-20250514-v1:0", # Use inference profile + system_prompt="You are a document analysis assessment expert. Your role is to evaluate the confidence and accuracy of data extraction results by analyzing them against source documents. 
Provide accurate confidence scores for each assessment.", + temperature=0.0, + max_tokens=4096, + default_confidence_threshold=0.90, + max_workers=max_workers, + # Use default ImageConfig (1200x1200 for ~1MP images) + ), + ) + + +def print_assessment_results(doc: Document, section_id: str, s3_client): + """Print detailed assessment results.""" + print("=" * 80) + print("šŸ“Š Assessment Results") + print("=" * 80) + print() + + print(f"Document Status: {doc.status}") + print(f"Document ID: {doc.id}") + print() + + # Find assessed section + section = next((s for s in doc.sections if s.section_id == section_id), None) + if not section: + print("āŒ Section not found") + return + + print(f"Section ID: {section.section_id}") + print(f"Classification: {section.classification}") + + # Check for confidence alerts + if section.confidence_threshold_alerts: + print(f"\nāš ļø Confidence Alerts: {len(section.confidence_threshold_alerts)}") + for i, alert in enumerate(section.confidence_threshold_alerts[:5]): + print(f" {i + 1}. {alert}") + if len(section.confidence_threshold_alerts) > 5: + print(f" ... and {len(section.confidence_threshold_alerts) - 5} more") + else: + print("\nāœ… No confidence threshold alerts") + + # Read assessment data from S3 + if section.extraction_result_uri: + print("\nšŸ“„ Assessment Data:") + + # Parse S3 URI + uri_parts = section.extraction_result_uri.replace("s3://", "").split("/", 1) + bucket = uri_parts[0] + key = uri_parts[1] + + try: + response = s3_client.get_object(Bucket=bucket, Key=key) + extraction_with_assessment = json.loads(response["Body"].read()) + + if "explainability_info" in extraction_with_assessment: + explainability = extraction_with_assessment["explainability_info"] + if explainability and len(explainability) > 0: + assessment_data = explainability[0] + + print(f" Assessed {len(assessment_data)} fields\n") + + # Show sample assessments + sample_fields = list(assessment_data.keys())[:5] + for field in sample_fields: + field_assessment = assessment_data[field] + if ( + isinstance(field_assessment, dict) + and "confidence" in field_assessment + ): + conf = field_assessment["confidence"] + value = field_assessment.get("value", "N/A") + threshold = field_assessment.get( + "confidence_threshold", 0.90 + ) + status_icon = "āœ…" if conf >= threshold else "āš ļø" + print(f" {status_icon} {field}:") + print(f" Value: {value}") + print( + f" Confidence: {conf:.2f} (threshold: {threshold:.2f})" + ) + + # Show bounding box if present + if "bounding_box" in field_assessment: + bbox = field_assessment["bounding_box"] + if isinstance(bbox, dict): + print( + f" Bounding Box: [x1={bbox.get('x1', 'N/A')}, y1={bbox.get('y1', 'N/A')}, " + f"x2={bbox.get('x2', 'N/A')}, y2={bbox.get('y2', 'N/A')}, page={bbox.get('page', 'N/A')}]" + ) + elif isinstance(bbox, list) and len(bbox) > 0: + # If multiple bounding boxes, show first one + first_bbox = bbox[0] + print( + f" Bounding Box: [x1={first_bbox.get('x1', 'N/A')}, y1={first_bbox.get('y1', 'N/A')}, " + f"x2={first_bbox.get('x2', 'N/A')}, y2={first_bbox.get('y2', 'N/A')}, page={first_bbox.get('page', 'N/A')}]" + ) + if len(bbox) > 1: + print( + f" (+ {len(bbox) - 1} more bounding boxes)" + ) + + if len(assessment_data) > 5: + print(f"\n ... 
and {len(assessment_data) - 5} more fields") + + # Show metadata + if "metadata" in extraction_with_assessment: + metadata = extraction_with_assessment["metadata"] + print("\nā±ļø Timing:") + if "assessment_time_seconds" in metadata: + print( + f" Assessment time: {metadata['assessment_time_seconds']:.2f}s" + ) + if "assessment_tasks_total" in metadata: + print(f" Total tasks: {metadata['assessment_tasks_total']}") + print( + f" Successful: {metadata.get('assessment_tasks_successful', 0)}" + ) + print( + f" Failed: {metadata.get('assessment_tasks_failed', 0)}" + ) + except Exception as e: + print(f" Could not read assessment data: {e}") + + # Show metering + if doc.metering: + print("\nšŸ’° Token Usage:") + for key, value in doc.metering.items(): + if isinstance(value, dict): + total_tokens = value.get("totalTokens", 0) + input_tokens = value.get("inputTokens", 0) + output_tokens = value.get("outputTokens", 0) + cache_read = value.get("cacheReadInputTokens", 0) + cache_write = value.get("cacheWriteInputTokens", 0) + + print(f" {key}:") + print(f" Total: {total_tokens:,} tokens") + print(f" Input: {input_tokens:,} | Output: {output_tokens:,}") + if cache_read > 0: + print(f" Cache read: {cache_read:,}") + if cache_write > 0: + print(f" Cache write: {cache_write:,}") + + +def main(): + """Run the full assessment test.""" + parser = argparse.ArgumentParser( + description="Test granular assessment with mocked AWS" + ) + parser.add_argument( + "--max-workers", + type=int, + default=2, + help="Number of parallel workers (default: 2)", + ) + args = parser.parse_args() + + print("=" * 80) + print("Granular Assessment Full Integration Test") + print(f"(With mocked S3, real Bedrock calls, {args.max_workers} workers)") + print("=" * 80) + print() + + doc_id = "test-bank-statement-001" + + # Use moto to mock AWS services (S3, DynamoDB, etc.) 
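+    # (S3 traffic in this test is served from moto's in-memory backend, so the
+    #  uploaded objects never touch real AWS)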
+ # But allow Bedrock calls to pass through to real AWS + with mock_aws( + config={ + "core": { + "mock_credentials": False, + "passthrough": { + "urls": [ + r".*bedrock.*\.amazonaws\.com.*", + r".*bedrock-runtime.*\.amazonaws\.com.*", + ] + }, + } + } + ): + # Create S3 client (will use mocked S3) + s3_client = boto3.client("s3", region_name=TEST_REGION) + + # Set up test data + doc, extraction_result = setup_s3_test_data(s3_client, TEST_BUCKET, doc_id) + + print("šŸ“‹ Test Document:") + print(f" ID: {doc.id}") + print(f" Pages: {len(doc.pages)}") + print(f" Sections: {len(doc.sections)}") + print(f" Fields in extraction: {len(extraction_result)} top-level") + print() + + # Create schema and config + schema = create_sample_schema() + config = create_test_config(schema, max_workers=args.max_workers) + + print("āš™ļø Initializing GranularAssessmentService...") + service = GranularAssessmentService( + region=TEST_REGION, config=config, cache_table=None + ) + print(" āœ“ Service initialized") + print(f" Max workers: {service.max_workers}") + print(f" Parallel: {service.enable_parallel}") + print() + + # Get task count + section = doc.sections[0] + properties = schema["properties"] + + tasks, _ = service._create_assessment_tasks( + extraction_results=extraction_result, + properties=properties, + default_confidence_threshold=config.assessment.default_confidence_threshold, + ) + + print("šŸ” Assessment Plan:") + print(f" Total tasks to execute: {len(tasks)}") + print(" Expected: 17 tasks") + print(" - 2 top-level scalars (Account Number, Statement Period)") + print( + " - 5 address fields (Street Number, Street Name, City, State, ZIP Code)" + ) + print(" - 9 transaction fields (3 transactions Ɨ 3 fields each)") + print() + + print("=" * 80) + print("šŸš€ Running Full Assessment Pipeline") + print("=" * 80) + print() + print("āš ļø This will make REAL calls to AWS Bedrock!") + print( + f" Assessing {len(tasks)} fields with {service.max_workers} parallel workers" + ) + print(f" Model: {config.assessment.model}") + print() + + try: + # Run the assessment + print("ā³ Calling process_document_section()...") + print() + + updated_doc = service.process_document_section(doc, section.section_id) + + print() + print("āœ… Assessment completed successfully!") + print() + + # Print results + print_assessment_results(updated_doc, section.section_id, s3_client) + + print() + print("=" * 80) + print("āœ… Full Integration Test PASSED!") + print("=" * 80) + + except Exception as e: + print() + print(f"āŒ Assessment failed: {e}") + print() + + import traceback + + print("Full error traceback:") + traceback.print_exc() + print() + + print("=" * 80) + print("āŒ Full Integration Test FAILED") + print("=" * 80) + + # Still return success code if it was just a Bedrock auth issue + if "credentials" in str(e).lower() or "bedrock" in str(e).lower(): + print( + "\nšŸ’” This appears to be an AWS credentials/Bedrock access issue." + ) + print( + " The test infrastructure (S3 mocking, task creation) is working correctly." 
+ ) + + +if __name__ == "__main__": + main() diff --git a/lib/idp_common_pkg/tests/unit/assessment/conftest.py b/lib/idp_common_pkg/tests/unit/assessment/conftest.py index a1b17cd9..352d58b2 100644 --- a/lib/idp_common_pkg/tests/unit/assessment/conftest.py +++ b/lib/idp_common_pkg/tests/unit/assessment/conftest.py @@ -20,5 +20,6 @@ sys.modules["strands.models.bedrock"] = MagicMock() sys.modules["strands.types"] = MagicMock() sys.modules["strands.types.content"] = MagicMock() +sys.modules["strands.types.media"] = MagicMock() sys.modules["strands.hooks"] = MagicMock() sys.modules["strands.hooks.events"] = MagicMock() diff --git a/lib/idp_common_pkg/tests/unit/assessment/test_aggregate_assessment_results.py b/lib/idp_common_pkg/tests/unit/assessment/test_aggregate_assessment_results.py new file mode 100644 index 00000000..cc546efa --- /dev/null +++ b/lib/idp_common_pkg/tests/unit/assessment/test_aggregate_assessment_results.py @@ -0,0 +1,430 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: MIT-0 + +""" +Unit tests for the _aggregate_assessment_results function in GranularAssessmentService. +Tests the aggregation logic with real-world data to ensure assessment data is properly +inserted into the assessment structure. +""" + +import pytest +from idp_common.assessment.granular_service import GranularAssessmentService +from idp_common.assessment.models import AssessmentResult, AssessmentTask + + +@pytest.mark.unit +class TestAggregateAssessmentResults: + """Tests for _aggregate_assessment_results function.""" + + def test_aggregate_simple_fields(self): + """Test aggregation with simple top-level fields using real data.""" + # Create service instance + service = GranularAssessmentService() + + # Create initial assessment structure (mimics what _create_assessment_tasks returns) + assessment_structure = { + "Account Number": None, + "Statement Period": None, + } + + # Create tasks using field_path (no parent references needed) + tasks = [ + AssessmentTask( + task_id="task_0", + task_type="attribute", + field_path=("Account Number",), + field_name="Account Number", + field_schema={"type": "string"}, + confidence_threshold=0.95, + ), + AssessmentTask( + task_id="task_1", + task_type="attribute", + field_path=("Statement Period",), + field_name="Statement Period", + field_schema={"type": "string"}, + confidence_threshold=0.9, + ), + ] + + # Create results with simplified data structure (flat assessment dict, not wrapped by field name) + results = [ + AssessmentResult( + task_id="task_0", + success=True, + assessment_data={ + "confidence": 0.0, + "value": "1234567890", + "reasoning": "The extracted value '1234567890' is completely incorrect.", + "confidence_threshold": 0.95, + "geometry": [ + { + "boundingBox": { + "top": 0.458, + "left": 0.215, + "width": 0.07, + "height": 0.014, + }, + "page": 1, + } + ], + }, + confidence_alerts=[ + { + "attribute_name": "Account Number", + "confidence": 0.0, + "confidence_threshold": 0.95, + } + ], + error_message=None, + processing_time=195.07, + metering={ + "assessment/bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0": { + "inputTokens": 17457, + "outputTokens": 1320, + } + }, + ), + AssessmentResult( + task_id="task_1", + success=True, + assessment_data={ + "confidence": 0.0, + "value": "January 2024", + "reasoning": "The extracted value 'January 2024' is completely incorrect.", + "confidence_threshold": 0.9, + "geometry": [ + { + "boundingBox": { + "top": 0.27, + "left": 0.058, + "width": 0.092, + "height": 
0.015, + }, + "page": 1, + } + ], + }, + confidence_alerts=[ + { + "attribute_name": "Statement Period", + "confidence": 0.0, + "confidence_threshold": 0.9, + } + ], + error_message=None, + processing_time=148.42, + metering={ + "assessment/bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0": { + "inputTokens": 14097, + "outputTokens": 1104, + } + }, + ), + ] + + # Call the function under test + ( + aggregated_structure, + alerts, + metering, + ) = service._aggregate_assessment_results(tasks, results, assessment_structure) + + # Assertions + assert aggregated_structure is not None + assert "Account Number" in aggregated_structure + assert "Statement Period" in aggregated_structure + + # Check Account Number was properly inserted (not None!) + assert aggregated_structure["Account Number"] is not None + assert aggregated_structure["Account Number"]["confidence"] == 0.0 + assert aggregated_structure["Account Number"]["value"] == "1234567890" + assert "reasoning" in aggregated_structure["Account Number"] + assert "geometry" in aggregated_structure["Account Number"] + + # Check Statement Period was properly inserted + assert aggregated_structure["Statement Period"] is not None + assert aggregated_structure["Statement Period"]["confidence"] == 0.0 + assert aggregated_structure["Statement Period"]["value"] == "January 2024" + + # Check alerts + assert len(alerts) == 2 + + # Check metering + assert metering is not None + assert ( + "assessment/bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0" in metering + ) + + def test_aggregate_nested_fields(self): + """Test aggregation with nested object fields using real data.""" + service = GranularAssessmentService() + + # Create nested assessment structure + nested_address = { + "Street Number": None, + "Street Name": None, + "City": None, + } + assessment_structure = {"Account Holder Address": nested_address} + + # Create tasks using field_path for navigation + tasks = [ + AssessmentTask( + task_id="task_0", + task_type="attribute", + field_path=("Account Holder Address", "Street Number"), + field_name="Street Number", + field_schema={"type": "string"}, + confidence_threshold=0.85, + ), + AssessmentTask( + task_id="task_1", + task_type="attribute", + field_path=("Account Holder Address", "Street Name"), + field_name="Street Name", + field_schema={"type": "string"}, + confidence_threshold=0.85, + ), + AssessmentTask( + task_id="task_2", + task_type="attribute", + field_path=("Account Holder Address", "City"), + field_name="City", + field_schema={"type": "string"}, + confidence_threshold=0.85, + ), + ] + + # Create results with flat assessment data (not wrapped by field name) + results = [ + AssessmentResult( + task_id="task_0", + success=True, + assessment_data={ + "confidence": 0.0, + "value": "123", + "reasoning": "The extracted value '123' is completely incorrect.", + "confidence_threshold": 0.85, + "geometry": [ + { + "boundingBox": { + "top": 0.17, + "left": 0.114, + "width": 0.021, + "height": 0.012, + }, + "page": 1, + } + ], + }, + confidence_alerts=[], + error_message=None, + processing_time=216.45, + ), + AssessmentResult( + task_id="task_1", + success=True, + assessment_data={ + "confidence": 0.0, + "value": "Main St", + "reasoning": "The extracted value 'Main St' is completely incorrect.", + "confidence_threshold": 0.85, + "geometry": [ + { + "boundingBox": { + "top": 0.17, + "left": 0.13, + "width": 0.09, + "height": 0.02, + }, + "page": 1, + } + ], + }, + confidence_alerts=[], + error_message=None, + processing_time=217.03, + ), + 
AssessmentResult( + task_id="task_2", + success=True, + assessment_data={ + "confidence": 0.1, + "value": "San Francisco", + "reasoning": "The extracted value 'San Francisco' does not match.", + "confidence_threshold": 0.85, + "geometry": [ + { + "boundingBox": { + "top": 0.17, + "left": 0.2, + "width": 0.09, + "height": 0.015, + }, + "page": 1, + } + ], + }, + confidence_alerts=[], + error_message=None, + processing_time=217.32, + ), + ] + + # Call the function under test + ( + aggregated_structure, + alerts, + metering, + ) = service._aggregate_assessment_results(tasks, results, assessment_structure) + + # Assertions - check nested structure + assert "Account Holder Address" in aggregated_structure + nested = aggregated_structure["Account Holder Address"] + + # CRITICAL: These should NOT be None! + assert nested["Street Number"] is not None + assert nested["Street Number"]["confidence"] == 0.0 + assert nested["Street Number"]["value"] == "123" + + assert nested["Street Name"] is not None + assert nested["Street Name"]["confidence"] == 0.0 + assert nested["Street Name"]["value"] == "Main St" + + assert nested["City"] is not None + assert nested["City"]["confidence"] == 0.1 + assert nested["City"]["value"] == "San Francisco" + + def test_aggregate_array_fields(self): + """Test aggregation with array fields using real data.""" + service = GranularAssessmentService() + + # Create array structure - each array item is a dict + transaction_0 = {"Date": None, "Amount": None} + transaction_1 = {"Date": None, "Amount": None} + transactions_array = [transaction_0, transaction_1] + assessment_structure = {"Transactions": transactions_array} + + # Create tasks using field_path for navigation + tasks = [ + AssessmentTask( + task_id="task_0", + task_type="attribute", + field_path=("Transactions", 0, "Date"), + field_name="Date", + field_schema={"type": "string"}, + confidence_threshold=0.9, + ), + AssessmentTask( + task_id="task_1", + task_type="attribute", + field_path=("Transactions", 0, "Amount"), + field_name="Amount", + field_schema={"type": "number"}, + confidence_threshold=0.9, + ), + AssessmentTask( + task_id="task_2", + task_type="attribute", + field_path=("Transactions", 1, "Date"), + field_name="Date", + field_schema={"type": "string"}, + confidence_threshold=0.9, + ), + AssessmentTask( + task_id="task_3", + task_type="attribute", + field_path=("Transactions", 1, "Amount"), + field_name="Amount", + field_schema={"type": "number"}, + confidence_threshold=0.9, + ), + ] + + # Create results with flat assessment data (not wrapped by field name) + results = [ + AssessmentResult( + task_id="task_0", + success=True, + assessment_data={ + "confidence": 0.2, + "value": "01/05/2024", + "reasoning": "Date mismatch", + "confidence_threshold": 0.9, + "geometry": [], + }, + confidence_alerts=[], + error_message=None, + processing_time=230.94, + ), + AssessmentResult( + task_id="task_1", + success=True, + assessment_data={ + "confidence": 0.1, + "value": 2500.0, + "reasoning": "Cannot verify amount", + "confidence_threshold": 0.9, + }, + confidence_alerts=[], + error_message=None, + processing_time=216.57, + ), + AssessmentResult( + task_id="task_2", + success=True, + assessment_data={ + "confidence": 0.0, + "value": "01/10/2024", + "reasoning": "Completely fabricated", + "confidence_threshold": 0.9, + }, + confidence_alerts=[], + error_message=None, + processing_time=207.28, + ), + AssessmentResult( + task_id="task_3", + success=True, + assessment_data={ + "confidence": 0.1, + "value": -200.0, + 
"reasoning": "Cannot verify", + "confidence_threshold": 0.9, + }, + confidence_alerts=[], + error_message=None, + processing_time=225.86, + ), + ] + + # Call the function under test + ( + aggregated_structure, + alerts, + metering, + ) = service._aggregate_assessment_results(tasks, results, assessment_structure) + + # Assertions - check array structure + assert "Transactions" in aggregated_structure + transactions = aggregated_structure["Transactions"] + assert len(transactions) == 2 + + # Check first transaction + assert transactions[0]["Date"] is not None + assert transactions[0]["Date"]["confidence"] == 0.2 + assert transactions[0]["Date"]["value"] == "01/05/2024" + + assert transactions[0]["Amount"] is not None + assert transactions[0]["Amount"]["confidence"] == 0.1 + assert transactions[0]["Amount"]["value"] == 2500.0 + + # Check second transaction + assert transactions[1]["Date"] is not None + assert transactions[1]["Date"]["confidence"] == 0.0 + assert transactions[1]["Date"]["value"] == "01/10/2024" + + assert transactions[1]["Amount"] is not None + assert transactions[1]["Amount"]["confidence"] == 0.1 + assert transactions[1]["Amount"]["value"] == -200.0 diff --git a/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py b/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py index 96db5c1f..3fc0ecba 100644 --- a/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py +++ b/lib/idp_common_pkg/tests/unit/config/test_configuration_sync.py @@ -257,7 +257,7 @@ def test_save_default_triggers_sync(self): dynamodb = boto3.resource("dynamodb", region_name="us-east-1") table_name = "test-config-table" - dynamodb.create_table( + dynamodb.create_table( # pyright: ignore[reportAttributeAccessIssue] TableName=table_name, KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], @@ -298,7 +298,7 @@ def test_save_custom_does_not_trigger_sync(self): dynamodb = boto3.resource("dynamodb", region_name="us-east-1") table_name = "test-config-table" - dynamodb.create_table( + dynamodb.create_table( # pyright: ignore[reportAttributeAccessIssue] TableName=table_name, KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], diff --git a/lib/idp_common_pkg/tests/unit/test_granular_assessment.py b/lib/idp_common_pkg/tests/unit/test_granular_assessment.py index 5df0b926..c9520334 100644 --- a/lib/idp_common_pkg/tests/unit/test_granular_assessment.py +++ b/lib/idp_common_pkg/tests/unit/test_granular_assessment.py @@ -155,8 +155,8 @@ def test_create_assessment_tasks_simple_attributes( # All tasks should have field_path as tuple assert all(isinstance(t.field_path, tuple) for t in tasks) - # All tasks should have parent_assessment_dict reference - assert all(t.parent_assessment_dict is not None for t in tasks) + # All tasks should have confidence_threshold set + assert all(t.confidence_threshold > 0 for t in tasks) # Check that assessment_structure mirrors extraction_results assert isinstance(assessment_structure, dict) @@ -268,7 +268,7 @@ def test_aggregate_assessment_results_new_approach(self, sample_config): "date": None, } - # Create tasks with new structure + # Create tasks with new structure (field_path used for navigation) task1 = AssessmentTask( task_id="task_0", task_type="attribute", @@ -276,7 +276,6 @@ def test_aggregate_assessment_results_new_approach(self, sample_config): field_name="sender_name", field_schema={"type": "string"}, 
confidence_threshold=0.9, - parent_assessment_dict=assessment_structure, ) task2 = AssessmentTask( @@ -286,7 +285,6 @@ def test_aggregate_assessment_results_new_approach(self, sample_config): field_name="recipient_name", field_schema={"type": "string"}, confidence_threshold=0.9, - parent_assessment_dict=assessment_structure, ) task3 = AssessmentTask( @@ -296,7 +294,6 @@ def test_aggregate_assessment_results_new_approach(self, sample_config): field_name="date", field_schema={"type": "string"}, confidence_threshold=0.9, - parent_assessment_dict=assessment_structure, ) # Create results diff --git a/lib/idp_common_pkg/verify_stickler.py b/lib/idp_common_pkg/verify_stickler.py index 42d82917..3323644f 100644 --- a/lib/idp_common_pkg/verify_stickler.py +++ b/lib/idp_common_pkg/verify_stickler.py @@ -10,7 +10,7 @@ """ import sys -from typing import Any, dict +from typing import Any def verify_stickler_import(): From 5a066fc493695b8ea43d5521be0d3e0ea45a3c86 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 22:40:47 +0000 Subject: [PATCH 21/30] fix failing test --- lib/idp_common_pkg/idp_common/assessment/service.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/service.py b/lib/idp_common_pkg/idp_common/assessment/service.py index de010bdb..0e64dc7f 100644 --- a/lib/idp_common_pkg/idp_common/assessment/service.py +++ b/lib/idp_common_pkg/idp_common/assessment/service.py @@ -511,8 +511,8 @@ def _load_extraction_data(self, section) -> ExtractionData: extraction_results = extraction_data.get("inference_result", {}) if not extraction_results: - raise ValueError( - f"No extraction results found for section {section.section_id}" + logger.warning( + f"No extraction results found for section {section.section_id}, skipping assessment" ) return ExtractionData( @@ -915,6 +915,13 @@ def process_document_section(self, document: Document, section_id: str) -> Docum t1 = time.time() logger.info(f"Time taken to load extraction data: {t1 - t0:.2f} seconds") + # Handle empty extraction results gracefully + if not extraction_results: + logger.warning( + f"No extraction results for section {section_id}, skipping assessment" + ) + return document + # Load document content (text, images, OCR confidence) document_content = self._load_document_content(document, section) t2 = time.time() From f3ab5981d601db96fbbc36281600efc2a37457e6 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Mon, 24 Nov 2025 22:42:14 +0000 Subject: [PATCH 22/30] import fix --- scripts/test_grid_overlay.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/test_grid_overlay.py b/scripts/test_grid_overlay.py index 1cfeb691..3b5ab726 100644 --- a/scripts/test_grid_overlay.py +++ b/scripts/test_grid_overlay.py @@ -28,7 +28,7 @@ # Add the library to path sys.path.insert(0, str(Path(__file__).parent.parent / "lib" / "idp_common_pkg")) -from idp_common.grid_overlay import ( +from idp_common.utils.grid_overlay import ( add_ruler_edges, draw_bounding_boxes, add_ruler_and_draw_boxes, @@ -44,7 +44,7 @@ def convert_pdf_to_image(pdf_path: str) -> bytes: page = doc.load_page(0) # First page # Render at 150 DPI for good quality - pix = page.get_pixmap(dpi=150) + pix = page.get_pixmap(dpi=150) # pyright: ignore[reportAttributeAccessIssue] return pix.tobytes("jpeg") except ImportError: print("ERROR: PyMuPDF (fitz) is required for PDF conversion.") From 29b477313b74c3374984c30abc7d8199c9dc7a1e Mon Sep 17 00:00:00 2001 From: Kazmer 
Nagy-Betegh Date: Mon, 24 Nov 2025 22:56:33 +0000 Subject: [PATCH 23/30] cleanup: remove artifacts and redundant code from PR review --- lib/idp_common_pkg/idp_common/assessment/granular_service.py | 2 +- lib/idp_common_pkg/idp_common/assessment/strands_executor.py | 2 -- lib/idp_common_pkg/idp_common/assessment/strands_service.py | 3 +-- 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index acb338a4..6cef8a2f 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -126,7 +126,7 @@ def __init__( dynamodb: DynamoDBServiceResource = boto3.resource( "dynamodb", region_name=self.region - ) # pyright: ignore[reportAssignmentType]modb", region_name=self.region) + ) # pyright: ignore[reportAssignmentType] self.cache_table = dynamodb.Table(self.cache_table_name) logger.info( f"Granular assessment caching enabled using table: {self.cache_table_name}" diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py index 7b0d3015..8e478e48 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py @@ -39,7 +39,6 @@ async def execute_tasks_async( Args: tasks: List of assessment tasks to execute - base_content: Base prompt content extraction_results: Full extraction results page_images: List of page images (with grid overlay) sorted_page_ids: List of page IDs @@ -166,7 +165,6 @@ def execute_assessment_tasks_parallel( Args: tasks: List of assessment tasks - base_content: Base prompt content extraction_results: Full extraction results page_images: List of page images (with grid overlay already applied) sorted_page_ids: List of page IDs in sorted order diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index e6f997ee..1b0d13c7 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -95,8 +95,7 @@ async def assess_attribute_with_strands( system_prompt=enhanced_system_prompt, state={ "task": task.model_dump(), - "extraction_results": extraction_results, - "assessment_output": None, + "assessment_output": None, # Will be populated by submit_assessment tool }, conversation_manager=SummarizingConversationManager( summary_ratio=0.8, preserve_recent_messages=1 From 958d775df64e2876f08d014a9c05b515bc8b3e76 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 26 Nov 2025 14:18:11 +0000 Subject: [PATCH 24/30] update tests to pass --- .../idp_common/config/models.py | 7 ----- .../test_classification_service.py | 3 ++- .../tests/unit/ocr/test_ocr_service.py | 26 +++++++++++-------- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index 1e1c3126..60dcefce 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -133,13 +133,6 @@ def parse_int(cls, v: Any) -> int: return int(v) if v else 0 return int(v) - @model_validator(mode="after") - def model_validator(self) -> Self: - if not self.agentic.review_agent_model: - self.agentic.review_agent_model = self.model - - return self - 
@model_validator(mode="after") def set_default_review_agent_model(self) -> Self: """Set review_agent_model to extraction model if not specified.""" diff --git a/lib/idp_common_pkg/tests/unit/classification/test_classification_service.py b/lib/idp_common_pkg/tests/unit/classification/test_classification_service.py index 3a294bbd..e7100539 100644 --- a/lib/idp_common_pkg/tests/unit/classification/test_classification_service.py +++ b/lib/idp_common_pkg/tests/unit/classification/test_classification_service.py @@ -291,7 +291,8 @@ def test_classify_page_bedrock_success( # Verify calls mock_get_text.assert_called_once_with("s3://bucket/text.txt") - mock_prepare_image.assert_called_once_with("s3://bucket/image.jpg", None, None) + # ImageConfig defaults to 1200x1200 when not explicitly configured + mock_prepare_image.assert_called_once_with("s3://bucket/image.jpg", 1200, 1200) mock_prepare_bedrock_image.assert_called_once_with(b"image_data") mock_invoke.assert_called_once() diff --git a/lib/idp_common_pkg/tests/unit/ocr/test_ocr_service.py b/lib/idp_common_pkg/tests/unit/ocr/test_ocr_service.py index b9a9d748..da26492f 100644 --- a/lib/idp_common_pkg/tests/unit/ocr/test_ocr_service.py +++ b/lib/idp_common_pkg/tests/unit/ocr/test_ocr_service.py @@ -106,10 +106,10 @@ def test_init_textract_backend_default(self): assert service.max_workers == 20 assert service.dpi is None # Default is None assert service.enhanced_features is False - # Default image sizing + # Default image sizing (from ImageConfig defaults) assert service.resize_config == { - "target_width": 951, - "target_height": 1268, + "target_width": 1200, + "target_height": 1200, } assert service.preprocessing_config is None @@ -198,10 +198,10 @@ def test_init_config_pattern_default_sizing(self): with patch("boto3.client"): service = OcrService(config=config) - # Verify defaults are applied + # Verify defaults are applied (from ImageConfig model defaults) assert service.resize_config == { - "target_width": 951, - "target_height": 1268, + "target_width": 1200, + "target_height": 1200, } assert service.dpi == 200 @@ -242,7 +242,8 @@ def test_init_config_pattern_empty_strings_apply_defaults(self): with patch("boto3.client"): service = OcrService(config=config) - # Verify defaults are applied (empty strings treated same as None) + # Verify OCR service defaults are applied (empty strings treated same as None) + # Note: Empty strings bypass Pydantic defaults, triggering OCR service defaults assert service.resize_config == { "target_width": 951, "target_height": 1268, @@ -250,13 +251,13 @@ def test_init_config_pattern_empty_strings_apply_defaults(self): assert service.dpi == 150 def test_init_config_pattern_partial_sizing(self): - """Test initialization with partial sizing configuration preserves existing behavior.""" + """Test initialization with partial sizing configuration uses explicit width and default height.""" config = { "ocr": { "image": { "dpi": 150, "target_width": 800, - # target_height missing - should disable defaults + # target_height missing - Pydantic model provides default of 1200 } } } @@ -264,8 +265,11 @@ def test_init_config_pattern_partial_sizing(self): with patch("boto3.client"): service = OcrService(config=config) - # Verify partial config disables defaults - assert service.resize_config is None + # Verify explicit width is used with default height from ImageConfig model + assert service.resize_config == { + "target_width": 800, + "target_height": 1200, + } assert service.dpi == 150 def 
test_init_config_pattern_invalid_sizing_fallback(self): From 049fb7449c0b2011956b5ede883f79979ba17410 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Wed, 26 Nov 2025 14:36:26 +0000 Subject: [PATCH 25/30] fixes --- .../pattern-3/rvl-cdip-package-sample/config.yaml | 5 ----- .../idp_common/assessment/granular_service.py | 9 +++++---- lib/idp_common_pkg/idp_common/extraction/agentic_idp.py | 2 +- patterns/pattern-2/src/assessment_function/index.py | 2 +- patterns/pattern-3/src/assessment_function/index.py | 2 +- 5 files changed, 8 insertions(+), 12 deletions(-) diff --git a/config_library/pattern-3/rvl-cdip-package-sample/config.yaml b/config_library/pattern-3/rvl-cdip-package-sample/config.yaml index 2ab91dba..913899c5 100644 --- a/config_library/pattern-3/rvl-cdip-package-sample/config.yaml +++ b/config_library/pattern-3/rvl-cdip-package-sample/config.yaml @@ -932,11 +932,6 @@ assessment: image: target_height: "" target_width: "" - granular: - enabled: true - max_workers: "20" - simple_batch_size: "3" - list_batch_size: "1" default_confidence_threshold: "0.8" top_p: "0.0" max_tokens: "10000" diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 6cef8a2f..d2b42e0b 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -1094,9 +1094,10 @@ def process_document_section(self, document: Document, section_id: str) -> Docum if ( document.status == Status.FAILED and document.errors - and not hasattr(document, "metadata") - or not document.metadata - or "failed_assessment_tasks" not in document.metadata + and ( + not document.metadata + or "failed_assessment_tasks" not in document.metadata + ) ): # Check if any errors contain throttling keywords throttling_keywords = [ @@ -1162,7 +1163,7 @@ def assess_document(self, document: Document) -> Document: def _handle_parsing_errors( self, document: Document, - failed_tasks: list[str], + failed_tasks: list[AssessmentResult], document_text: str, extraction_results: dict, ) -> str | None: diff --git a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py index 6154f185..c24769f5 100644 --- a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py +++ b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py @@ -467,7 +467,7 @@ def patch_buffer_data(patches: list[dict[str, Any]], agent: Agent) -> str: logger.info(f"Current length of buffer data {len(patched_data)} ") - return f"Successfully patched {str(patched_data)[100:]}...." + return f"Successfully patched {str(patched_data)[:100]}...." 
SYSTEM_PROMPT = """ diff --git a/patterns/pattern-2/src/assessment_function/index.py b/patterns/pattern-2/src/assessment_function/index.py index 18d3b281..dec12139 100644 --- a/patterns/pattern-2/src/assessment_function/index.py +++ b/patterns/pattern-2/src/assessment_function/index.py @@ -6,7 +6,7 @@ import time import logging -from idp_common import get_config, assessment +from idp_common import get_config from idp_common.models import Document, Status from idp_common.docs_service import create_document_service from idp_common import s3 diff --git a/patterns/pattern-3/src/assessment_function/index.py b/patterns/pattern-3/src/assessment_function/index.py index 9411d028..432385a1 100644 --- a/patterns/pattern-3/src/assessment_function/index.py +++ b/patterns/pattern-3/src/assessment_function/index.py @@ -174,7 +174,7 @@ def handler(event, context): logger.info(f"Starting assessment for section {section_id}") updated_document = assessment_service.process_document_section(document, section_id) t1 = time.time() - logger.info(f"Total extraction time: {t1 - t0:.2f} seconds") + logger.info(f"Total assessment time: {t1 - t0:.2f} seconds") # Check if document processing failed if updated_document.status == Status.FAILED: From 0777e5a3682e65872ab7652ac6bf7a7dac1ecfd8 Mon Sep 17 00:00:00 2001 From: Kazmer Nagy-Betegh Date: Thu, 27 Nov 2025 14:16:51 +0000 Subject: [PATCH 26/30] fix the ruler offset --- .../idp_common/assessment/models.py | 23 +- .../idp_common/assessment/strands_models.py | 1 - .../idp_common/assessment/strands_service.py | 1 - .../idp_common/assessment/strands_tools.py | 5 +- .../idp_common/config/models.py | 4 +- .../idp_common/utils/grid_overlay.py | 29 +- .../document-viewer/VisualEditorModal.jsx | 336 +++++++++--------- 7 files changed, 194 insertions(+), 205 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index 11b0adb8..3678e068 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -106,10 +106,15 @@ class BoundingBoxCoordinates(BaseModel): @classmethod def from_corners( - cls, x1: float, y1: float, x2: float, y2: float, scale: float = 1000.0 + cls, + x1: float, + y1: float, + x2: float, + y2: float, + scale: float = 1000.0, ) -> "BoundingBoxCoordinates": """ - Create from corner coordinates. + Create from corner coordinates in document space. 
Args: x1, y1: Top-left corner in 0-scale range @@ -129,6 +134,12 @@ def from_corners( width = (x2 - x1) / scale height = (y2 - y1) / scale + # Clamp to valid range + left = min(max(left, 0.0), 1.0) + top = min(max(top, 0.0), 1.0) + width = min(width, 1.0 - left) + height = min(height, 1.0 - top) + return cls(top=top, left=left, width=width, height=height) @@ -201,7 +212,6 @@ class FieldAssessmentData(BaseModel): """ confidence: float = Field(..., ge=0.0, le=1.0) - value: Any = Field(None, description="The extracted value") reasoning: str = Field(..., description="Confidence reasoning") confidence_threshold: float = Field(..., ge=0.0, le=1.0) geometry: list[Geometry] | None = Field( @@ -213,7 +223,6 @@ class FieldAssessmentData(BaseModel): def from_llm_response( cls, confidence: float, - value: Any, reasoning: str, confidence_threshold: float, bbox_coords: list[float] | None = None, @@ -227,7 +236,6 @@ def from_llm_response( return cls( confidence=confidence, - value=value, reasoning=reasoning, confidence_threshold=confidence_threshold, geometry=geometry, @@ -235,10 +243,9 @@ def from_llm_response( def to_explainability_format(self) -> dict[str, Any]: """Convert to explainability_info format for frontend.""" - result = { + result: dict[str, Any] = { "confidence": self.confidence, - "value": self.value, - "reasoning": self.reasoning, + "confidence_reason": self.reasoning, "confidence_threshold": self.confidence_threshold, } diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_models.py b/lib/idp_common_pkg/idp_common/assessment/strands_models.py index ba04ac65..a971de03 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_models.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_models.py @@ -40,7 +40,6 @@ def to_geometry(self) -> dict[str, Any]: class ConfidenceAssessment(BaseModel): """Confidence assessment for an attribute value.""" - value: Any = Field(..., description="The extracted value") confidence: float = Field(..., ge=0.0, le=1.0, description="Confidence score 0-1") reasoning: str = Field(..., description="Explanation for the confidence score") bounding_box: BoundingBox | None = Field( diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index 1b0d13c7..460c32fd 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -416,7 +416,6 @@ def _convert_to_assessment_result( # Create standardized field assessment data field_data = FieldAssessmentData.from_llm_response( confidence=assessment.confidence, - value=assessment.value, reasoning=assessment.reasoning, confidence_threshold=task.confidence_threshold, bbox_coords=( diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index de79e59b..e0f74a15 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -115,13 +115,12 @@ def view_image(input_data: ViewImageInput, agent: Agent) -> dict: "color": "red", } - # Draw the bounding box on the image (which already has ruler) + # Draw the bounding box on the image (which has 30px margin for ruler) # Let drawing errors propagate - if we can't draw, something is wrong img_bytes = draw_bounding_boxes( img_bytes, [bbox_dict], - has_ruler=True, - ruler_width=30, + margin_offset=30, ) logger.debug( diff --git a/lib/idp_common_pkg/idp_common/config/models.py 
b/lib/idp_common_pkg/idp_common/config/models.py index 60dcefce..ef5c5b15 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -141,7 +141,6 @@ def set_default_review_agent_model(self) -> Self: return self - @model_validator(mode="after") def set_default_review_agent_model(self) -> Self: """Set review_agent_model to extraction model if not specified.""" @@ -231,7 +230,8 @@ class AssessmentConfig(BaseModel): enabled: bool = Field(default=True, description="Enable assessment") model: Optional[str] = Field( - default=None, description="Bedrock model ID for assessment" + default="us.anthropic.claude-haiku-4-5-20251001-v1:0", + description="Bedrock model ID for assessment", ) system_prompt: str = Field( default="You are a document analysis assessment expert. Your role is to evaluate the confidence and accuracy of data extraction results by analyzing them against source documents.\n\nProvide accurate confidence scores for each assessment.", diff --git a/lib/idp_common_pkg/idp_common/utils/grid_overlay.py b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py index 51c1c802..c2fd05e1 100644 --- a/lib/idp_common_pkg/idp_common/utils/grid_overlay.py +++ b/lib/idp_common_pkg/idp_common/utils/grid_overlay.py @@ -147,8 +147,7 @@ def add_ruler_edges( def draw_bounding_boxes( image_data: bytes, bboxes: list[dict], - has_ruler: bool = False, - ruler_width: int = 30, + margin_offset: int = 0, box_color: str = "red", box_width: int = 3, label_font_size: int = 12, @@ -160,12 +159,11 @@ def draw_bounding_boxes( Args: image_data: Raw image bytes bboxes: List of bounding box dictionaries, each containing: - - 'bbox': [x1, y1, x2, y2] in 0-1000 normalized scale + - 'bbox': [x1, y1, x2, y2] in 0-1000 normalized scale (document space) - 'label': Optional label text for the box - 'color': Optional color override for this box - 'page': Optional page number (for multi-page docs) - has_ruler: If True, account for ruler margins in coordinate calculation - ruler_width: Width of ruler margin (only used if has_ruler=True) + margin_offset: Pixel offset for top-left margin (e.g., if image has decorative margins) box_color: Default color for bounding boxes box_width: Line width for bounding boxes label_font_size: Font size for box labels @@ -191,17 +189,11 @@ def draw_bounding_boxes( image = Image.open(io.BytesIO(image_data)).convert("RGBA") width, height = image.size - # If image has ruler edges, calculate the actual document area - if has_ruler: - doc_width = width - ruler_width - doc_height = height - ruler_width - offset_x = ruler_width - offset_y = ruler_width - else: - doc_width = width - doc_height = height - offset_x = 0 - offset_y = 0 + # Calculate document area (excluding any margin offset) + doc_width = width - margin_offset + doc_height = height - margin_offset + offset_x = margin_offset + offset_y = margin_offset # Create overlay for semi-transparent boxes overlay = Image.new("RGBA", (width, height), (0, 0, 0, 0)) @@ -331,12 +323,11 @@ def add_ruler_and_draw_boxes( label_interval=label_interval, ) - # Then draw bounding boxes (accounting for ruler offset) + # Then draw bounding boxes (accounting for margin offset from ruler) result = draw_bounding_boxes( image_with_ruler, bboxes, - has_ruler=True, - ruler_width=ruler_width, + margin_offset=ruler_width, box_color=box_color, box_width=box_width, ) diff --git a/src/ui/src/components/document-viewer/VisualEditorModal.jsx b/src/ui/src/components/document-viewer/VisualEditorModal.jsx index 
879a78aa..5e8f4265 100644 --- a/src/ui/src/components/document-viewer/VisualEditorModal.jsx +++ b/src/ui/src/components/document-viewer/VisualEditorModal.jsx @@ -24,136 +24,136 @@ import { getFieldConfidenceInfo } from '../common/confidence-alerts-utils'; const logger = new ConsoleLogger('VisualEditorModal'); -// Memoized component to render a bounding box on an image -const BoundingBox = memo(({ box, page, currentPage, imageRef, zoomLevel = 1, panOffset = { x: 0, y: 0 } }) => { - const [dimensions, setDimensions] = useState({ width: 0, height: 0 }); +// Component that renders document image with bounding box overlay +// The bounding box is drawn on a canvas that sits directly on top of the image +// Both are wrapped in a container that handles transforms together +const DocumentImageWithOverlay = memo( + React.forwardRef( + ({ src, alt, geometry, maxHeight = 'min(70vh, 700px)', zoomLevel = 1, panOffset = { x: 0, y: 0 }, onError }, ref) => { + const containerRef = useRef(null); + const canvasRef = useRef(null); + const internalImageRef = useRef(null); + const [imageDimensions, setImageDimensions] = useState({ width: 0, height: 0 }); + + // Expose the image ref to parent component + React.useImperativeHandle(ref, () => internalImageRef.current); + + // Draw bounding box on canvas whenever geometry or dimensions change + useEffect(() => { + const canvas = canvasRef.current; + if (!canvas || !imageDimensions.width || !imageDimensions.height) { + logger.debug('DocumentImageWithOverlay - Canvas draw skipped:', { + hasCanvas: !!canvas, + imageDimensions, + geometry, + }); + return; + } - useEffect(() => { - if (imageRef.current && page === currentPage) { - const updateDimensions = () => { - const img = imageRef.current; - const rect = img.getBoundingClientRect(); - const containerRect = img.parentElement.getBoundingClientRect(); - - // Get the actual displayed dimensions and position after all transforms - const transformedWidth = rect.width; - const transformedHeight = rect.height; - const transformedOffsetX = rect.left - containerRect.left; - const transformedOffsetY = rect.top - containerRect.top; - - setDimensions({ - transformedWidth, - transformedHeight, - transformedOffsetX, - transformedOffsetY, - }); + // Set canvas internal resolution to match displayed size + // This is critical - canvas.width/height set the drawing resolution + canvas.width = imageDimensions.width; + canvas.height = imageDimensions.height; - logger.debug('VisualEditorModal - BoundingBox dimensions updated:', { - imageWidth: img.width, - imageHeight: img.height, - naturalWidth: img.naturalWidth, - naturalHeight: img.naturalHeight, - offsetX: rect.left - containerRect.left, - offsetY: rect.top - containerRect.top, - imageRect: rect, - containerRect, - }); - }; + const ctx = canvas.getContext('2d'); + // Clear previous drawings + ctx.clearRect(0, 0, canvas.width, canvas.height); - // Update dimensions when image loads - if (imageRef.current.complete && imageRef.current.naturalWidth > 0) { - updateDimensions(); - } else { - imageRef.current.addEventListener('load', updateDimensions); - } - - return () => { - if (imageRef.current) { - imageRef.current.removeEventListener('load', updateDimensions); + if (!geometry?.boundingBox) { + logger.debug('DocumentImageWithOverlay - No geometry to draw:', { geometry }); + return; } - }; - } - return undefined; - }, [imageRef, page, currentPage]); - // Update dimensions when zoom or pan changes - useEffect(() => { - if (imageRef.current && page === currentPage) { - const 
updateDimensions = () => { - const img = imageRef.current; - const rect = img.getBoundingClientRect(); - const containerRect = img.parentElement.getBoundingClientRect(); - - // Get the actual displayed dimensions and position after all transforms - const transformedWidth = rect.width; - const transformedHeight = rect.height; - const transformedOffsetX = rect.left - containerRect.left; - const transformedOffsetY = rect.top - containerRect.top; - - setDimensions({ - transformedWidth, - transformedHeight, - transformedOffsetX, - transformedOffsetY, + const bbox = geometry.boundingBox; + const padding = 3; + + // Calculate pixel coordinates from normalized values (0-1) + // Coordinates are in document space (0-1), directly mapped to image dimensions + const x = bbox.left * imageDimensions.width - padding; + const y = bbox.top * imageDimensions.height - padding; + const width = bbox.width * imageDimensions.width + padding * 2; + const height = bbox.height * imageDimensions.height + padding * 2; + + // Draw semi-transparent fill + ctx.fillStyle = 'rgba(255, 0, 0, 0.2)'; + ctx.fillRect(x, y, width, height); + + // Draw border + ctx.strokeStyle = 'red'; + ctx.lineWidth = 3; + ctx.strokeRect(x, y, width, height); + + logger.debug('DocumentImageWithOverlay - Drew bounding box:', { + bbox, + pixelCoords: { x, y, width, height }, + canvasDimensions: { width: canvas.width, height: canvas.height }, + imageDimensions, }); - }; - // Delay to allow transforms to complete - const timeoutId = setTimeout(updateDimensions, 150); - // Ensure accuracy after reset - const secondTimeoutId = setTimeout(updateDimensions, 300); - return () => { - clearTimeout(timeoutId); - clearTimeout(secondTimeoutId); - }; - } - return undefined; - }, [zoomLevel, panOffset, imageRef, page, currentPage]); - - if (page !== currentPage || !box || !dimensions.transformedWidth) { - return null; - } - - // Calculate position based on image dimensions with proper zoom and pan handling - if (!box.boundingBox) { - return null; - } - - const padding = 5; - const bbox = box.boundingBox; - - // Calculate position and size directly on the transformed image - const finalLeft = bbox.left * dimensions.transformedWidth + dimensions.transformedOffsetX - padding; - const finalTop = bbox.top * dimensions.transformedHeight + dimensions.transformedOffsetY - padding; - const finalWidth = bbox.width * dimensions.transformedWidth + padding * 2; - const finalHeight = bbox.height * dimensions.transformedHeight + padding * 2; - - // Position the bounding box directly without additional transforms - const style = { - position: 'absolute', - left: `${finalLeft}px`, - top: `${finalTop}px`, - width: `${finalWidth}px`, - height: `${finalHeight}px`, - border: '2px solid red', - pointerEvents: 'none', - zIndex: 10, - transition: 'all 0.1s ease-out', - }; + }, [geometry, imageDimensions]); - logger.debug('VisualEditorModal - BoundingBox style calculated:', { - bbox, - dimensions, - finalLeft, - finalTop, - finalWidth, - finalHeight, - style, - }); + const handleImageLoad = (e) => { + const img = e.target; + // Get the displayed dimensions of the image + const displayedWidth = img.offsetWidth; + const displayedHeight = img.offsetHeight; - return
; -}); + // Get bounding rect to check for any offset + setImageDimensions({ width: displayedWidth, height: displayedHeight }); + }; -BoundingBox.displayName = 'BoundingBox'; + return ( +
+ {alt} + 0 ? `${imageDimensions.width}px` : 'auto', + height: imageDimensions.height > 0 ? `${imageDimensions.height}px` : 'auto', + }} + /> +
+ ); + } + ) +); + +DocumentImageWithOverlay.displayName = 'DocumentImageWithOverlay'; // Memoized component to render a form field based on its type const FormFieldRenderer = memo( @@ -965,39 +965,43 @@ const VisualEditorModal = ({ visible, onDismiss, jsonData, onChange, isReadOnly, } }; + // Determine the geometry to display based on current page + const currentPageIndex = pageIds.indexOf(currentPage) + 1; // 1-based page number + const geometryForCurrentPage = + activeFieldGeometry && activeFieldGeometry.page === currentPageIndex ? activeFieldGeometry : null; + // Create carousel items from page images - const carouselItems = pageIds.map((pageId) => ({ - id: pageId, - content: ( -
1 ? 'grab' : 'default', - }} - onWheel={handleWheel} - > - {pageImages[pageId] ? ( - <> - { + const isCurrentPage = pageId === currentPage; + const pageIndex = pageIds.indexOf(pageId) + 1; + const geometryForPage = activeFieldGeometry && activeFieldGeometry.page === pageIndex ? activeFieldGeometry : null; + + return { + id: pageId, + content: ( +
1 ? 'grab' : 'default', + }} + onWheel={handleWheel} + > + {pageImages[pageId] ? ( + { logger.error(`Error loading image for page ${pageId}:`, e); // Fallback image for error state @@ -1009,26 +1013,16 @@ const VisualEditorModal = ({ visible, onDismiss, jsonData, onChange, isReadOnly, e.target.src = fallbackImage; }} /> - {activeFieldGeometry && ( - - )} - - ) : ( - - -
Loading image...
-
- )} -
- ), - })); + ) : ( + + +
Loading image...
+
+ )} +
+ ), + }; + }); return ( Date: Mon, 1 Dec 2025 10:38:54 +0000 Subject: [PATCH 27/30] encapsulate ruler --- .../idp_common/assessment/granular_service.py | 14 +- .../idp_common/assessment/models.py | 13 +- .../idp_common/assessment/strands_executor.py | 4 +- .../idp_common/assessment/strands_service.py | 17 +- .../idp_common/assessment/strands_tools.py | 33 +- .../test_bounding_box_integration.py | 374 +++++++++--------- .../assessment/test_company_address_fix.py | 6 +- .../test_granular_bbox_conversion.py | 138 +++---- .../test_nested_geometry_conversion.py | 37 +- 9 files changed, 286 insertions(+), 350 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index d2b42e0b..008ac058 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -36,7 +36,6 @@ from idp_common.extraction.models import ExtractionData from idp_common.models import Document, Status from idp_common.utils import check_token_limit -from idp_common.utils.grid_overlay import add_ruler_edges logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) @@ -845,16 +844,8 @@ def process_document_section(self, document: Document, section_id: str) -> Docum f"Found {len(cached_task_results)} cached assessment task results, processing {len(tasks_to_process)} remaining tasks" ) - # Apply grid overlay to page images for assessment - grid_page_images = [] - for idx, page_img in enumerate(page_images): - grid_img = add_ruler_edges(page_img) - logger.info( - f"Added ruler overlay to page {idx}: {len(page_img):,} bytes -> {len(grid_img):,} bytes" - ) - grid_page_images.append(grid_img) - # Execute tasks using Strands-based parallel executor + # Note: ruler overlay is added internally by strands_service/strands_tools logger.info( f"Processing {len(tasks_to_process)} assessment tasks with Strands executor (max_concurrent={self.max_workers})" ) @@ -862,11 +853,12 @@ def process_document_section(self, document: Document, section_id: str) -> Docum request_start_time = time.time() # Call Strands executor - handles both parallel and sequential based on max_concurrent + # Pass raw page_images - ruler overlay is added internally when needed task_results, task_metering, processing_time = ( execute_assessment_tasks_parallel( tasks=tasks_to_process, extraction_results=extraction_results, - page_images=grid_page_images, + page_images=page_images, sorted_page_ids=sorted_page_ids, model_id=self.config.assessment.model, system_prompt=self.config.assessment.system_prompt, diff --git a/lib/idp_common_pkg/idp_common/assessment/models.py b/lib/idp_common_pkg/idp_common/assessment/models.py index 3678e068..b515a72a 100644 --- a/lib/idp_common_pkg/idp_common/assessment/models.py +++ b/lib/idp_common_pkg/idp_common/assessment/models.py @@ -114,7 +114,7 @@ def from_corners( scale: float = 1000.0, ) -> "BoundingBoxCoordinates": """ - Create from corner coordinates in document space. + Create from corner coordinates. Args: x1, y1: Top-left corner in 0-scale range @@ -228,7 +228,16 @@ def from_llm_response( bbox_coords: list[float] | None = None, page_num: int | None = None, ) -> "FieldAssessmentData": - """Create from LLM response data.""" + """ + Create from LLM response data. 
+ + Args: + confidence: Confidence score (0-1) + reasoning: Explanation for confidence + confidence_threshold: Threshold for confidence alerts + bbox_coords: Optional bounding box coordinates [x1, y1, x2, y2] + page_num: Optional page number (1-indexed) + """ geometry = None if bbox_coords is not None and page_num is not None: geom = Geometry.from_bbox_list(bbox_coords, page_num) diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py index 8e478e48..a3330304 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py @@ -40,7 +40,7 @@ async def execute_tasks_async( Args: tasks: List of assessment tasks to execute extraction_results: Full extraction results - page_images: List of page images (with grid overlay) + page_images: List of raw page images (ruler overlay added internally) sorted_page_ids: List of page IDs model_id: Model to use system_prompt: System prompt @@ -166,7 +166,7 @@ def execute_assessment_tasks_parallel( Args: tasks: List of assessment tasks extraction_results: Full extraction results - page_images: List of page images (with grid overlay already applied) + page_images: List of raw page images (ruler overlay added internally) sorted_page_ids: List of page IDs in sorted order model_id: Model ID system_prompt: System prompt diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_service.py b/lib/idp_common_pkg/idp_common/assessment/strands_service.py index 460c32fd..4fb26a53 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_service.py @@ -26,6 +26,7 @@ from idp_common.assessment.strands_tools import create_strands_tools from idp_common.bedrock import build_model_config from idp_common.utils.bedrock_utils import async_exponential_backoff_retry +from idp_common.utils.grid_overlay import add_ruler_edges logger = Logger(service="assessment", level=os.getenv("LOG_LEVEL", "INFO")) @@ -49,9 +50,8 @@ async def assess_attribute_with_strands( Args: task: Assessment task to process - base_content: Base prompt content (includes images) extraction_results: Full extraction results - page_images: List of page images (with grid overlay already applied) + page_images: List of raw page images (ruler overlay added internally) sorted_page_ids: List of page IDs in sorted order model_id: Bedrock model ID system_prompt: System prompt for assessment @@ -364,16 +364,21 @@ def _build_task_prompt( Args: task: Assessment task for one specific field - page_images: List of page images to include in the prompt + page_images: List of raw page images (ruler will be added here) Returns: List of content blocks with images and task text """ field_path_str = _convert_field_path_to_string(task.field_path) - # Create image content blocks + # Create image content blocks with ruler overlay + # Rulers are added here so the LLM can see coordinate reference marks image_blocks = [ - ContentBlock(image=ImageContent(format="png", source=ImageSource(bytes=img))) + ContentBlock( + image=ImageContent( + format="png", source=ImageSource(bytes=add_ruler_edges(img)) + ) + ) for img in page_images ] @@ -414,6 +419,8 @@ def _convert_to_assessment_result( assessment = output.assessment # Create standardized field assessment data + # Note: bounding box coordinates are already adjusted for ruler offset + # by the submit_assessment tool in strands_tools.py field_data = 
FieldAssessmentData.from_llm_response( confidence=assessment.confidence, reasoning=assessment.reasoning, diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index e0f74a15..9c562c25 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -13,7 +13,7 @@ from strands import Agent, tool from idp_common.assessment.strands_models import AssessmentOutput, BoundingBox -from idp_common.utils.grid_overlay import draw_bounding_boxes +from idp_common.utils.grid_overlay import add_ruler_and_draw_boxes, add_ruler_edges from idp_common.utils.strands_agent_tools.todo_list import ( create_todo_list, update_todo, @@ -37,10 +37,13 @@ class ViewImageInput(BaseModel): @tool def submit_assessment(assessment: AssessmentOutput, agent: Agent) -> str: + """Submit the final assessment for a field.""" # Validate assessment structure and return helpful errors validated_assessment = AssessmentOutput.model_validate(assessment) - # Store in agent state + # Store in agent state - coordinates are in 0-1000 document space + # The ruler shows 0-1000 scale mapping to the document, so LLM coordinates + # are already in document space and need no adjustment agent.state.set("assessment_output", validated_assessment.model_dump(mode="json")) logger.info( @@ -56,7 +59,7 @@ def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) Create a view_image tool that has access to page images. Args: - page_images: List of page image bytes (with grid overlay already applied) + page_images: List of raw page image bytes (without ruler overlay) sorted_page_ids: List of page IDs in sorted order Returns: @@ -64,7 +67,7 @@ def create_view_image_tool(page_images: list[bytes], sorted_page_ids: list[str]) """ @tool - def view_image(input_data: ViewImageInput, agent: Agent) -> dict: + def view_image(input_data: ViewImageInput, agent: Agent) -> dict[str, Any]: """ View a specific page image, optionally highlighting a bounding box area. 
@@ -97,13 +100,13 @@ def view_image(input_data: ViewImageInput, agent: Agent) -> dict: f"Valid range: 0-{len(page_images) - 1}" ) - # Get the base image (already has grid overlay) - img_bytes = page_images[view_input.image_index] + # Get the raw image (no ruler overlay yet) + raw_img_bytes = page_images[view_input.image_index] page_id = sorted_page_ids[view_input.image_index] - # If bounding box is specified, draw it on the image + # Add ruler and optionally draw bounding box if view_input.bounding_box: - # Convert BoundingBox to dict format for draw_bounding_boxes + # Convert BoundingBox to dict format bbox_dict = { "bbox": [ view_input.bounding_box.x1, @@ -115,21 +118,19 @@ def view_image(input_data: ViewImageInput, agent: Agent) -> dict: "color": "red", } - # Draw the bounding box on the image (which has 30px margin for ruler) - # Let drawing errors propagate - if we can't draw, something is wrong - img_bytes = draw_bounding_boxes( - img_bytes, - [bbox_dict], - margin_offset=30, - ) + # Add ruler overlay and draw bounding box in one step + img_bytes = add_ruler_and_draw_boxes(raw_img_bytes, [bbox_dict]) logger.debug( - "Drew bounding box on image", + "Added ruler and drew bounding box on image", extra={ "image_index": view_input.image_index, "bbox": bbox_dict["bbox"], }, ) + else: + # Just add ruler overlay (no bounding box) + img_bytes = add_ruler_edges(raw_img_bytes) logger.info( "Returning image to agent", diff --git a/lib/idp_common_pkg/tests/unit/assessment/test_bounding_box_integration.py b/lib/idp_common_pkg/tests/unit/assessment/test_bounding_box_integration.py index 51e0b079..0d4b3a8f 100644 --- a/lib/idp_common_pkg/tests/unit/assessment/test_bounding_box_integration.py +++ b/lib/idp_common_pkg/tests/unit/assessment/test_bounding_box_integration.py @@ -2,203 +2,193 @@ # SPDX-License-Identifier: MIT-0 """ -Unit tests for bounding box integration in AssessmentService. -""" - -import pytest -from idp_common.assessment.service import AssessmentService - - -class TestBoundingBoxIntegration: - """Test bounding box functionality in AssessmentService.""" - - def test_convert_bbox_to_geometry_valid_coordinates(self): - """Test conversion from bbox coordinates to geometry format.""" - service = AssessmentService() - - # Test normal coordinates - bbox_coords = [100, 200, 300, 400] # x1, y1, x2, y2 in 0-1000 scale - page_num = 1 +Unit tests for bounding box conversion models. - result = service._convert_bbox_to_geometry(bbox_coords, page_num) +These tests validate the coordinate conversion from LLM bounding box responses +(in 0-1000 scale) to normalized 0-1 scale for the UI. - expected = { - "boundingBox": { - "top": 0.2, # y1/1000 - "left": 0.1, # x1/1000 - "width": 0.2, # (x2-x1)/1000 - "height": 0.2, # (y2-y1)/1000 - }, - "page": 1, - } - - assert result == expected +Note: Ruler offset adjustment is handled separately in strands_tools.py when +the LLM submits assessments. The models here work with document-space coordinates. 
+""" - def test_convert_bbox_to_geometry_reversed_coordinates(self): +import pytest +from idp_common.assessment.models import ( + BoundingBoxCoordinates, + FieldAssessmentData, + Geometry, +) + + +class TestBoundingBoxCoordinates: + """Test BoundingBoxCoordinates model.""" + + def test_from_corners_basic_conversion(self): + """Test basic conversion from corner coordinates to normalized bbox.""" + bbox = BoundingBoxCoordinates.from_corners( + x1=100, + y1=200, + x2=300, + y2=400, + scale=1000.0, + ) + + assert bbox.left == 0.1 + assert bbox.top == 0.2 + assert bbox.width == 0.2 + assert bbox.height == 0.2 + + def test_from_corners_handles_reversed_coordinates(self): """Test that coordinates are corrected when reversed.""" - service = AssessmentService() - - # Test with reversed coordinates (x2 < x1, y2 < y1) - bbox_coords = [300, 400, 100, 200] # Reversed - page_num = 2 - - result = service._convert_bbox_to_geometry(bbox_coords, page_num) + bbox = BoundingBoxCoordinates.from_corners( + x1=300, + y1=400, + x2=100, + y2=200, # Reversed + scale=1000.0, + ) # Should be corrected to proper order - expected = { - "boundingBox": { - "top": 0.2, # min(y1,y2)/1000 - "left": 0.1, # min(x1,x2)/1000 - "width": 0.2, # (max(x)-min(x))/1000 - "height": 0.2, # (max(y)-min(y))/1000 - }, - "page": 2, - } - - assert result == expected - - def test_convert_bbox_to_geometry_invalid_coordinates(self): + assert bbox.left == 0.1 + assert bbox.top == 0.2 + assert bbox.width == 0.2 + assert bbox.height == 0.2 + + def test_from_corners_clamps_to_valid_range(self): + """Test that values are clamped to 0-1 range.""" + # Coordinates that would exceed bounds + bbox = BoundingBoxCoordinates.from_corners( + x1=-50, + y1=-50, + x2=1100, + y2=1100, + scale=1000.0, + ) + + # Should clamp to valid range + assert bbox.left >= 0.0 + assert bbox.top >= 0.0 + assert bbox.left + bbox.width <= 1.0 + assert bbox.top + bbox.height <= 1.0 + + def test_from_corners_edge_coordinates(self): + """Test with coordinates at edges of document.""" + # Full document bbox + bbox = BoundingBoxCoordinates.from_corners( + x1=0, + y1=0, + x2=1000, + y2=1000, + scale=1000.0, + ) + + assert bbox.left == 0.0 + assert bbox.top == 0.0 + assert bbox.width == 1.0 + assert bbox.height == 1.0 + + def test_from_corners_small_region(self): + """Test with a small region.""" + bbox = BoundingBoxCoordinates.from_corners( + x1=500, + y1=500, + x2=510, + y2=510, + scale=1000.0, + ) + + assert bbox.left == 0.5 + assert bbox.top == 0.5 + assert bbox.width == 0.01 + assert bbox.height == 0.01 + + +class TestGeometry: + """Test Geometry model.""" + + def test_from_bbox_list_valid(self): + """Test creation from bbox list format.""" + # LLM response format: [x1, y1, x2, y2] in 0-1000 scale + geom = Geometry.from_bbox_list([100, 200, 300, 400], page_num=1) + + assert geom.page == 1 + assert geom.boundingBox is not None + assert geom.boundingBox.left == 0.1 # 100/1000 + assert geom.boundingBox.top == 0.2 # 200/1000 + + def test_from_bbox_list_different_page(self): + """Test creation with different page number.""" + geom = Geometry.from_bbox_list([100, 200, 300, 400], page_num=3) + + assert geom.page == 3 + assert geom.boundingBox is not None + + def test_from_bbox_list_invalid_length(self): """Test error handling for invalid coordinate count.""" - service = AssessmentService() - - # Test with wrong number of coordinates - bbox_coords = [100, 200, 300] # Only 3 coordinates - page_num = 1 - with pytest.raises(ValueError, match="Expected 4 coordinates"): - 
service._convert_bbox_to_geometry(bbox_coords, page_num) - - def test_extract_geometry_from_assessment_with_bbox_data(self): - """Test extraction of geometry data from assessment response.""" - service = AssessmentService() - - assessment_data = { - "account_number": { - "confidence": 0.95, - "confidence_reason": "Clear text with high OCR confidence", - "bbox": [100, 200, 300, 400], - "page": 1, - }, - "account_balance": { - "confidence": 0.88, - "confidence_reason": "Good text quality", - "bbox": [400, 500, 600, 550], - "page": 1, - }, - } - - result = service._extract_geometry_from_assessment(assessment_data) - - # Check that bbox/page data was converted to geometry format - assert "geometry" in result["account_number"] - assert "bbox" not in result["account_number"] - assert "page" not in result["account_number"] - - # Check geometry structure - geometry = result["account_number"]["geometry"][0] - assert "boundingBox" in geometry - assert "page" in geometry - assert geometry["page"] == 1 - - # Check bounding box values - bbox = geometry["boundingBox"] - assert bbox["top"] == 0.2 # 200/1000 - assert bbox["left"] == 0.1 # 100/1000 - assert bbox["width"] == 0.2 # (300-100)/1000 - assert bbox["height"] == 0.2 # (400-200)/1000 - - def test_extract_geometry_from_assessment_without_bbox_data(self): - """Test that assessment data without bbox passes through unchanged.""" - service = AssessmentService() - - assessment_data = { - "account_number": { - "confidence": 0.95, - "confidence_reason": "Clear text with high OCR confidence", - # No bbox or page data - } - } - - result = service._extract_geometry_from_assessment(assessment_data) - - # Should pass through unchanged - assert result == assessment_data - assert "geometry" not in result["account_number"] - - def test_extract_geometry_from_assessment_invalid_bbox_format(self): - """Test handling of invalid bbox format.""" - service = AssessmentService() - - assessment_data = { - "account_number": { - "confidence": 0.95, - "confidence_reason": "Clear text", - "bbox": "invalid_format", # Invalid format - "page": 1, - } - } - - result = service._extract_geometry_from_assessment(assessment_data) - - # Should remove invalid bbox data but keep confidence info - assert "geometry" not in result["account_number"] - assert "bbox" not in result["account_number"] - assert "page" not in result["account_number"] - assert result["account_number"]["confidence"] == 0.95 - - def test_extract_geometry_from_assessment_missing_page(self): - """Test handling when bbox exists but page is missing.""" - service = AssessmentService() - - assessment_data = { - "account_number": { - "confidence": 0.95, - "confidence_reason": "Clear text", - "bbox": [100, 200, 300, 400], - # Missing page - } - } - - result = service._extract_geometry_from_assessment(assessment_data) - - # Should not create geometry without page info - assert "geometry" not in result["account_number"] - assert "bbox" not in result["account_number"] - assert result["account_number"]["confidence"] == 0.95 - - def test_extract_geometry_from_assessment_edge_coordinates(self): - """Test conversion with edge case coordinates.""" - service = AssessmentService() - - assessment_data = { - "top_left_field": { - "confidence": 0.9, - "confidence_reason": "Located at top-left", - "bbox": [0, 0, 100, 100], # Top-left corner - "page": 1, - }, - "bottom_right_field": { - "confidence": 0.85, - "confidence_reason": "Located at bottom-right", - "bbox": [900, 900, 1000, 1000], # Bottom-right corner - "page": 2, - }, - } - - 
result = service._extract_geometry_from_assessment(assessment_data) - - # Check top-left field - top_left_geometry = result["top_left_field"]["geometry"][0] - assert top_left_geometry["boundingBox"]["top"] == 0.0 - assert top_left_geometry["boundingBox"]["left"] == 0.0 - assert top_left_geometry["boundingBox"]["width"] == 0.1 - assert top_left_geometry["boundingBox"]["height"] == 0.1 - assert top_left_geometry["page"] == 1 - - # Check bottom-right field - bottom_right_geometry = result["bottom_right_field"]["geometry"][0] - assert bottom_right_geometry["boundingBox"]["top"] == 0.9 - assert bottom_right_geometry["boundingBox"]["left"] == 0.9 - assert bottom_right_geometry["boundingBox"]["width"] == 0.1 - assert bottom_right_geometry["boundingBox"]["height"] == 0.1 - assert bottom_right_geometry["page"] == 2 + Geometry.from_bbox_list([100, 200, 300], page_num=1) + + def test_to_ui_format(self): + """Test conversion to UI-compatible format.""" + geom = Geometry.from_bbox_list([100, 200, 300, 400], page_num=2) + ui_format = geom.to_ui_format() + + assert "boundingBox" in ui_format + assert "page" in ui_format + assert ui_format["page"] == 2 + assert "top" in ui_format["boundingBox"] + assert "left" in ui_format["boundingBox"] + assert "width" in ui_format["boundingBox"] + assert "height" in ui_format["boundingBox"] + + +class TestFieldAssessmentData: + """Test FieldAssessmentData model.""" + + def test_from_llm_response_with_bbox(self): + """Test creation from LLM response with bounding box.""" + assessment = FieldAssessmentData.from_llm_response( + confidence=0.95, + reasoning="Clear text with high OCR confidence", + confidence_threshold=0.8, + bbox_coords=[100, 200, 300, 400], + page_num=1, + ) + + assert assessment.confidence == 0.95 + assert assessment.reasoning == "Clear text with high OCR confidence" + assert assessment.geometry is not None + assert len(assessment.geometry) == 1 + assert assessment.geometry[0].page == 1 + + def test_from_llm_response_without_bbox(self): + """Test creation from LLM response without bounding box.""" + assessment = FieldAssessmentData.from_llm_response( + confidence=0.85, + reasoning="Good text quality", + confidence_threshold=0.8, + bbox_coords=None, + page_num=None, + ) + + assert assessment.confidence == 0.85 + assert assessment.geometry is None + + def test_to_explainability_format(self): + """Test conversion to explainability format for frontend.""" + assessment = FieldAssessmentData.from_llm_response( + confidence=0.95, + reasoning="Clear text", + confidence_threshold=0.8, + bbox_coords=[100, 200, 300, 400], + page_num=1, + ) + + result = assessment.to_explainability_format() + + assert result["confidence"] == 0.95 + assert result["confidence_reason"] == "Clear text" + assert result["confidence_threshold"] == 0.8 + assert "geometry" in result + assert len(result["geometry"]) == 1 + assert "boundingBox" in result["geometry"][0] + assert result["geometry"][0]["page"] == 1 diff --git a/lib/idp_common_pkg/tests/unit/assessment/test_company_address_fix.py b/lib/idp_common_pkg/tests/unit/assessment/test_company_address_fix.py index 84bc3249..81e9b61d 100644 --- a/lib/idp_common_pkg/tests/unit/assessment/test_company_address_fix.py +++ b/lib/idp_common_pkg/tests/unit/assessment/test_company_address_fix.py @@ -7,13 +7,11 @@ import json -from idp_common.assessment.service import AssessmentService +from idp_common.assessment.geometry_utils import extract_geometry_from_nested_dict def test_company_address_geometry_conversion(): """Test the exact CompanyAddress 
case provided by the user.""" - service = AssessmentService() - # This is the exact data structure from the user's example assessment_data = { "YTDCityTax": { @@ -49,7 +47,7 @@ def test_company_address_geometry_conversion(): } # Process the assessment data - result = service._extract_geometry_from_assessment(assessment_data) + result = extract_geometry_from_nested_dict(assessment_data) print("=== Before/After Comparison ===") print("\nBEFORE (with bbox):") diff --git a/lib/idp_common_pkg/tests/unit/assessment/test_granular_bbox_conversion.py b/lib/idp_common_pkg/tests/unit/assessment/test_granular_bbox_conversion.py index 9908f73e..cb483a8b 100644 --- a/lib/idp_common_pkg/tests/unit/assessment/test_granular_bbox_conversion.py +++ b/lib/idp_common_pkg/tests/unit/assessment/test_granular_bbox_conversion.py @@ -2,16 +2,15 @@ # SPDX-License-Identifier: MIT-0 """ -Test to verify that both regular and granular assessment services -handle bounding box conversion correctly. +Test to verify that the geometry_utils module correctly handles +bounding box conversion for assessment data. """ -from idp_common.assessment.granular_service import GranularAssessmentService -from idp_common.assessment.service import AssessmentService +from idp_common.assessment.geometry_utils import extract_geometry_from_nested_dict def test_both_services_convert_bbox_to_geometry(): - """Test that both regular and granular services convert bbox to geometry.""" + """Test that geometry_utils converts bbox to geometry correctly.""" # Test data with bbox coordinates mock_assessment_data = { @@ -37,105 +36,56 @@ def test_both_services_convert_bbox_to_geometry(): }, } - print("=== Testing Bounding Box Conversion in Both Services ===") + print("=== Testing Bounding Box Conversion in geometry_utils ===") - # Test regular assessment service - print("\nšŸ“ Testing Regular AssessmentService") - regular_service = AssessmentService() - regular_result = regular_service._extract_geometry_from_assessment( - mock_assessment_data - ) + # Test geometry conversion using geometry_utils + print("\nšŸ“ Testing extract_geometry_from_nested_dict") + result = extract_geometry_from_nested_dict(mock_assessment_data) # Check YTDNetPay conversion - regular_ytd = regular_result["YTDNetPay"] - regular_ytd_has_geometry = "geometry" in regular_ytd - regular_ytd_has_bbox = "bbox" in regular_ytd + ytd = result["YTDNetPay"] + ytd_has_geometry = "geometry" in ytd + ytd_has_bbox = "bbox" in ytd - print( - f"Regular Service - YTDNetPay: geometry={regular_ytd_has_geometry}, bbox={regular_ytd_has_bbox}" - ) + print(f"YTDNetPay: geometry={ytd_has_geometry}, bbox={ytd_has_bbox}") # Check CompanyAddress.State conversion - regular_state = regular_result["CompanyAddress"]["State"] - regular_state_has_geometry = "geometry" in regular_state - regular_state_has_bbox = "bbox" in regular_state + state = result["CompanyAddress"]["State"] + state_has_geometry = "geometry" in state + state_has_bbox = "bbox" in state - print( - f"Regular Service - CompanyAddress.State: geometry={regular_state_has_geometry}, bbox={regular_state_has_bbox}" - ) + print(f"CompanyAddress.State: geometry={state_has_geometry}, bbox={state_has_bbox}") - # Test granular assessment service - print("\nšŸ“ Testing GranularAssessmentService") - granular_service = GranularAssessmentService() - granular_result = granular_service._extract_geometry_from_assessment( - mock_assessment_data - ) - - # Check YTDNetPay conversion - granular_ytd = granular_result["YTDNetPay"] - granular_ytd_has_geometry = "geometry" 
in granular_ytd - granular_ytd_has_bbox = "bbox" in granular_ytd - - print( - f"Granular Service - YTDNetPay: geometry={granular_ytd_has_geometry}, bbox={granular_ytd_has_bbox}" - ) - - # Check CompanyAddress.State conversion - granular_state = granular_result["CompanyAddress"]["State"] - granular_state_has_geometry = "geometry" in granular_state - granular_state_has_bbox = "bbox" in granular_state - - print( - f"Granular Service - CompanyAddress.State: geometry={granular_state_has_geometry}, bbox={granular_state_has_bbox}" - ) - - # Verify both services work identically + # Verify conversion print("\nšŸ” Verification:") - # Both should convert bbox to geometry - assert regular_ytd_has_geometry, ( - "Regular service should convert YTDNetPay bbox to geometry" - ) - assert not regular_ytd_has_bbox, ( - "Regular service should remove YTDNetPay bbox after conversion" - ) - assert granular_ytd_has_geometry, ( - "Granular service should convert YTDNetPay bbox to geometry" - ) - assert not granular_ytd_has_bbox, ( - "Granular service should remove YTDNetPay bbox after conversion" - ) - - # Both should handle nested attributes - assert regular_state_has_geometry, ( - "Regular service should convert nested State bbox to geometry" - ) - assert not regular_state_has_bbox, ( - "Regular service should remove nested State bbox after conversion" - ) - assert granular_state_has_geometry, ( - "Granular service should convert nested State bbox to geometry" - ) - assert not granular_state_has_bbox, ( - "Granular service should remove nested State bbox after conversion" - ) - - # Check geometry values are equivalent - regular_ytd_geometry = regular_ytd["geometry"][0]["boundingBox"] - granular_ytd_geometry = granular_ytd["geometry"][0]["boundingBox"] - - assert regular_ytd_geometry == granular_ytd_geometry, ( - "Both services should produce identical geometry" - ) - - print("āœ… Regular AssessmentService: Converts bbox → geometry correctly") - print("āœ… GranularAssessmentService: Converts bbox → geometry correctly") - print("āœ… Both services handle nested attributes (CompanyAddress.State)") - print("āœ… Both services produce identical geometry output") - print("āœ… Both services remove raw bbox data after conversion") - - print("\nšŸŽ‰ Both services now support automatic bounding box conversion!") - print("Your deployed stack with granular assessment will now work correctly!") + # Should convert bbox to geometry + assert ytd_has_geometry, "Should convert YTDNetPay bbox to geometry" + assert not ytd_has_bbox, "Should remove YTDNetPay bbox after conversion" + + # Should handle nested attributes + assert state_has_geometry, "Should convert nested State bbox to geometry" + assert not state_has_bbox, "Should remove nested State bbox after conversion" + + # Check geometry values are correct + ytd_geometry = ytd["geometry"][0]["boundingBox"] + assert ytd_geometry["top"] == 0.333 # 333/1000 + assert ytd_geometry["left"] == 0.443 # 443/1000 + assert ytd_geometry["width"] == 0.064 # (507-443)/1000 + assert ytd_geometry["height"] == 0.012 # (345-333)/1000 + + state_geometry = state["geometry"][0]["boundingBox"] + assert state_geometry["top"] == 0.116 # 116/1000 + assert state_geometry["left"] == 0.23 # 230/1000 + assert state_geometry["width"] == 0.029 # (259-230)/1000 + assert state_geometry["height"] == 0.01 # (126-116)/1000 + + print("āœ… Converts bbox → geometry correctly") + print("āœ… Handles nested attributes (CompanyAddress.State)") + print("āœ… Removes raw bbox data after conversion") + print("āœ… Produces 
correct normalized geometry values") + + print("\nšŸŽ‰ geometry_utils correctly supports automatic bounding box conversion!") return True diff --git a/lib/idp_common_pkg/tests/unit/assessment/test_nested_geometry_conversion.py b/lib/idp_common_pkg/tests/unit/assessment/test_nested_geometry_conversion.py index a739fd2e..bf51621c 100644 --- a/lib/idp_common_pkg/tests/unit/assessment/test_nested_geometry_conversion.py +++ b/lib/idp_common_pkg/tests/unit/assessment/test_nested_geometry_conversion.py @@ -2,11 +2,14 @@ # SPDX-License-Identifier: MIT-0 """ -Unit tests for nested geometry conversion in AssessmentService. +Unit tests for nested geometry conversion in geometry_utils. Tests the recursive processing of group attributes with bounding boxes. """ -from idp_common.assessment.service import AssessmentService +from idp_common.assessment.geometry_utils import ( + extract_geometry_from_nested_dict, + process_assessment_geometry, +) class TestNestedGeometryConversion: @@ -14,8 +17,6 @@ class TestNestedGeometryConversion: def test_simple_attribute_geometry_conversion(self): """Test that simple attributes still work correctly.""" - service = AssessmentService() - assessment_data = { "YTDCityTax": { "confidence": 1.0, @@ -25,7 +26,7 @@ def test_simple_attribute_geometry_conversion(self): } } - result = service._extract_geometry_from_assessment(assessment_data) + result = extract_geometry_from_nested_dict(assessment_data) # Check conversion worked assert "geometry" in result["YTDCityTax"] @@ -45,8 +46,6 @@ def test_simple_attribute_geometry_conversion(self): def test_nested_group_attribute_geometry_conversion(self): """Test that nested group attributes are processed recursively.""" - service = AssessmentService() - assessment_data = { "CompanyAddress": { "State": { @@ -64,7 +63,7 @@ def test_nested_group_attribute_geometry_conversion(self): } } - result = service._extract_geometry_from_assessment(assessment_data) + result = extract_geometry_from_nested_dict(assessment_data) # Check that nested attributes were processed company_address = result["CompanyAddress"] @@ -90,8 +89,6 @@ def test_nested_group_attribute_geometry_conversion(self): def test_mixed_attributes_with_and_without_geometry(self): """Test processing of mixed attributes - some with geometry, some without.""" - service = AssessmentService() - assessment_data = { "currency": { "confidence": 0.0, @@ -119,7 +116,7 @@ def test_mixed_attributes_with_and_without_geometry(self): }, } - result = service._extract_geometry_from_assessment(assessment_data) + result = extract_geometry_from_nested_dict(assessment_data) # currency should pass through unchanged (no geometry) assert "geometry" not in result["currency"] @@ -141,8 +138,6 @@ def test_mixed_attributes_with_and_without_geometry(self): def test_list_attributes_with_nested_geometry(self): """Test processing of list attributes where each item may have geometry.""" - service = AssessmentService() - assessment_data = { "Transactions": [ { @@ -175,7 +170,7 @@ def test_list_attributes_with_nested_geometry(self): ] } - result = service._extract_geometry_from_assessment(assessment_data) + result = extract_geometry_from_nested_dict(assessment_data) transactions = result["Transactions"] assert len(transactions) == 2 @@ -196,8 +191,6 @@ def test_list_attributes_with_nested_geometry(self): def test_deeply_nested_group_attributes(self): """Test deeply nested group attributes.""" - service = AssessmentService() - assessment_data = { "EmployeeInfo": { "PersonalDetails": { @@ -224,7 +217,7 @@ def 
test_deeply_nested_group_attributes(self):
         }
 
-        result = service._extract_geometry_from_assessment(assessment_data)
+        result = extract_geometry_from_nested_dict(assessment_data)
 
         employee_info = result["EmployeeInfo"]
 
@@ -247,8 +240,6 @@ def test_deeply_nested_group_attributes(self):
 
     def test_invalid_nested_bbox_data(self):
         """Test handling of invalid bbox data in nested attributes."""
-        service = AssessmentService()
-
         assessment_data = {
             "CompanyAddress": {
                 "State": {
@@ -266,7 +257,7 @@ def test_invalid_nested_bbox_data(self):
             }
         }
 
-        result = service._extract_geometry_from_assessment(assessment_data)
+        result = extract_geometry_from_nested_dict(assessment_data)
 
         # Both should have invalid bbox data removed but confidence preserved
         state = result["CompanyAddress"]["State"]
@@ -281,9 +272,7 @@ def test_invalid_nested_bbox_data(self):
         assert zip_code["confidence"] == 0.85
 
     def test_process_single_assessment_geometry_method(self):
-        """Test the helper method for processing single assessments."""
-        service = AssessmentService()
-
+        """Test the helper function for processing single assessments."""
        # Test valid geometry conversion
         assessment = {
             "confidence": 0.95,
@@ -292,7 +281,7 @@ def test_process_single_assessment_geometry_method(self):
             "page": 1,
         }
 
-        result = service._process_single_assessment_geometry(assessment, "test_field")
+        result = process_assessment_geometry(assessment, "test_field")
 
         assert "geometry" in result
         assert "bbox" not in result

From dd50777cd42e0461055f80292dc4da37e125cede Mon Sep 17 00:00:00 2001
From: Kazmer Nagy-Betegh
Date: Mon, 1 Dec 2025 13:37:08 +0000
Subject: [PATCH 28/30] add structured logging

---
 lib/idp_common_pkg/idp_common/assessment/strands_tools.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py
index 9c562c25..57f5b220 100644
--- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py
+++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py
@@ -125,7 +125,13 @@ def view_image(input_data: ViewImageInput, agent: Agent) -> dict[str, Any]:
             "Added ruler and drew bounding box on image",
             extra={
                 "image_index": view_input.image_index,
-                "bbox": bbox_dict["bbox"],
+                "page_id": page_id,
+                "x1": view_input.bounding_box.x1,
+                "y1": view_input.bounding_box.y1,
+                "x2": view_input.bounding_box.x2,
+                "y2": view_input.bounding_box.y2,
+                "page": view_input.bounding_box.page,
+                "label": view_input.label,
             },
         )
     else:

From e21efb890dada74cb7b9d04bc458813eb56b9c10 Mon Sep 17 00:00:00 2001
From: Kazmer Nagy-Betegh
Date: Mon, 1 Dec 2025 14:05:35 +0000
Subject: [PATCH 29/30] improve retry mechanism

---
 lib/idp_common_pkg/idp_common/utils/bedrock_utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py b/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py
index 09b61c45..987e2e10 100644
--- a/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py
+++ b/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py
@@ -19,6 +19,7 @@
     InvokeModelRequestTypeDef,
     InvokeModelResponseTypeDef,
 )
+from strands.models.bedrock import ModelThrottledException
 
 # Optional import for strands-agents (may not be installed in all environments)
 try:

From acd8967fe0019269f778eb0b41d97c5bb79ebb5d Mon Sep 17 00:00:00 2001
From: Kazmer Nagy-Betegh
Date: Wed, 3 Dec 2025 12:00:43 +0000
Subject: [PATCH 30/30] small fixes

---
 .../idp_common/assessment/granular_service.py |  8 +-
.../idp_common/assessment/strands_executor.py | 82 +++++++++---------- .../idp_common/assessment/strands_tools.py | 2 +- .../idp_common/config/models.py | 8 -- .../idp_common/extraction/agentic_idp.py | 4 + .../idp_common/utils/bedrock_utils.py | 25 +++--- 6 files changed, 62 insertions(+), 67 deletions(-) diff --git a/lib/idp_common_pkg/idp_common/assessment/granular_service.py b/lib/idp_common_pkg/idp_common/assessment/granular_service.py index 008ac058..51b0da9a 100644 --- a/lib/idp_common_pkg/idp_common/assessment/granular_service.py +++ b/lib/idp_common_pkg/idp_common/assessment/granular_service.py @@ -11,15 +11,19 @@ 4. Maintaining assessment structure that mirrors extraction results """ +from __future__ import annotations + import json import os import time -from typing import Any +from typing import TYPE_CHECKING, Any from aws_lambda_powertools import Logger -from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource from idp_common import image, metrics, s3, utils + +if TYPE_CHECKING: + from mypy_boto3_dynamodb.service_resource import DynamoDBServiceResource from idp_common.assessment.models import AssessmentResult, AssessmentTask from idp_common.assessment.strands_executor import execute_assessment_tasks_parallel from idp_common.assessment.strands_service import _convert_field_path_to_string diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py index a3330304..0a95fade 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_executor.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_executor.py @@ -6,6 +6,7 @@ """ import asyncio +import concurrent.futures import os import time from typing import Any, cast @@ -188,53 +189,44 @@ def execute_assessment_tasks_parallel( start_time = time.time() - # Run async executor - # Use asyncio.run() for clean event loop management + # Define the async coroutine to run + async def _run() -> tuple[list[AssessmentResult], dict[str, Any]]: + return await execute_tasks_async( + tasks=tasks, + extraction_results=extraction_results, + page_images=page_images, + sorted_page_ids=sorted_page_ids, + model_id=model_id, + system_prompt=system_prompt, + temperature=temperature, + max_tokens=max_tokens, + document_schema=document_schema, + max_concurrent=max_concurrent, + max_retries=max_retries, + connect_timeout=connect_timeout, + read_timeout=read_timeout, + ) + + # Check if there's already a running event loop + # This is more robust than catching exceptions with string matching try: - results, metering = asyncio.run( - execute_tasks_async( - tasks=tasks, - extraction_results=extraction_results, - page_images=page_images, - sorted_page_ids=sorted_page_ids, - model_id=model_id, - system_prompt=system_prompt, - temperature=temperature, - max_tokens=max_tokens, - document_schema=document_schema, - max_concurrent=max_concurrent, - max_retries=max_retries, - connect_timeout=connect_timeout, - read_timeout=read_timeout, - ) + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop is not None and loop.is_running(): + # We're inside an async context (e.g., Jupyter, nested async call) + # Execute in a separate thread to avoid "cannot be called from a running event loop" + logger.warning( + "Event loop already running, executing in separate thread", + extra={"loop": str(loop)}, ) - except RuntimeError as e: - # Handle case where event loop already exists (shouldn't happen in Lambda) - if "There is no current event loop" in str(e) 
or "asyncio.run()" in str(e): - logger.warning( - "Event loop already exists, using get_event_loop", - extra={"error": str(e)}, - ) - loop = asyncio.get_event_loop() - results, metering = loop.run_until_complete( - execute_tasks_async( - tasks=tasks, - extraction_results=extraction_results, - page_images=page_images, - sorted_page_ids=sorted_page_ids, - model_id=model_id, - system_prompt=system_prompt, - temperature=temperature, - max_tokens=max_tokens, - document_schema=document_schema, - max_concurrent=max_concurrent, - max_retries=max_retries, - connect_timeout=connect_timeout, - read_timeout=read_timeout, - ) - ) - else: - raise + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(asyncio.run, _run()) + results, metering = future.result() + else: + # No running loop - safe to use asyncio.run() + results, metering = asyncio.run(_run()) duration = time.time() - start_time diff --git a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py index 57f5b220..d2a7ebbc 100644 --- a/lib/idp_common_pkg/idp_common/assessment/strands_tools.py +++ b/lib/idp_common_pkg/idp_common/assessment/strands_tools.py @@ -172,7 +172,7 @@ def create_strands_tools( Create all tools needed for Strands-based assessment. Args: - page_images: List of page image bytes (with grid overlay already applied) + page_images: List of raw page image bytes (ruler overlay added on-demand by view_image tool) sorted_page_ids: List of page IDs in sorted order Returns: diff --git a/lib/idp_common_pkg/idp_common/config/models.py b/lib/idp_common_pkg/idp_common/config/models.py index ef5c5b15..3224beea 100644 --- a/lib/idp_common_pkg/idp_common/config/models.py +++ b/lib/idp_common_pkg/idp_common/config/models.py @@ -141,14 +141,6 @@ def set_default_review_agent_model(self) -> Self: return self - @model_validator(mode="after") - def set_default_review_agent_model(self) -> Self: - """Set review_agent_model to extraction model if not specified.""" - if not self.agentic.review_agent_model: - self.agentic.review_agent_model = self.model - - return self - class ClassificationConfig(BaseModel): """Document classification configuration""" diff --git a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py index c24769f5..e4f01ea3 100644 --- a/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py +++ b/lib/idp_common_pkg/idp_common/extraction/agentic_idp.py @@ -11,6 +11,7 @@ import json import logging import os +import re import threading from pathlib import Path from typing import ( @@ -21,6 +22,7 @@ import jsonpatch from aws_lambda_powertools import Logger +from botocore.config import Config from PIL import Image from pydantic import BaseModel, Field from strands import Agent, tool @@ -36,6 +38,8 @@ from idp_common.bedrock import ( build_model_config, + supports_prompt_caching, + supports_tool_caching, ) from idp_common.config.models import IDPConfig from idp_common.utils.bedrock_utils import ( diff --git a/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py b/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py index 987e2e10..674a2520 100644 --- a/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py +++ b/lib/idp_common_pkg/idp_common/utils/bedrock_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import json import logging @@ -7,19 +9,20 @@ import time from collections.abc import Awaitable, Callable from functools import wraps -from typing 
import Unpack +from typing import TYPE_CHECKING, Unpack import botocore.exceptions -from mypy_boto3_bedrock_runtime import BedrockRuntimeClient -from mypy_boto3_bedrock_runtime.type_defs import ( - ConverseRequestTypeDef, - ConverseResponseTypeDef, - ConverseStreamRequestTypeDef, - ConverseStreamResponseTypeDef, - InvokeModelRequestTypeDef, - InvokeModelResponseTypeDef, -) -from strands.models.bedrock import ModelThrottledException + +if TYPE_CHECKING: + from mypy_boto3_bedrock_runtime import BedrockRuntimeClient + from mypy_boto3_bedrock_runtime.type_defs import ( + ConverseRequestTypeDef, + ConverseResponseTypeDef, + ConverseStreamRequestTypeDef, + ConverseStreamResponseTypeDef, + InvokeModelRequestTypeDef, + InvokeModelResponseTypeDef, + ) # Optional import for strands-agents (may not be installed in all environments) try: