diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..08614d2e --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,33 @@ +name: Quality Checks + +on: + pull_request: + branches: + - '**' + +jobs: + quality: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dev dependencies (ruff, pytest) + run: pip install ruff pytest pytest-cov pytest-asyncio + + - name: Install code_puppy + run: pip install . + + - name: Lint with ruff + run: ruff check . + + - name: Check formatting with ruff + run: ruff format --check . + + - name: Run pytest + run: pytest --cov=code_puppy -s diff --git a/.gitignore b/.gitignore index 7a98c53f..7fde8359 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,13 @@ wheels/ .venv .coverage + +# Session memory +.puppy_session_memory.json + +# Pytest cache +.pytest_cache/ + +dummy_path + +.idea/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..4eb465d6 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,25 @@ +fail_fast: true +repos: + - repo: https://github.com/timothycrosley/isort + rev: 5.12.0 + hooks: + - id: isort + args: [--filter-files, --profile, black] + files: \.py$ + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-case-conflict + - id: check-json + - id: mixed-line-ending + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.11.2 + hooks: + # Run the linter. + - id: ruff + args: [--fix, --ignore=E501] + # Run the formatter. + - id: ruff-format diff --git a/AGENT.md b/AGENT.md new file mode 100644 index 00000000..7c3865db --- /dev/null +++ b/AGENT.md @@ -0,0 +1,53 @@ +# Code Puppy + +Code Puppy is a code gen agent! 
+
+## Code Style
+
+- Clean
+- Concise
+- Follow YAGNI, SRP, DRY, etc.
+- Don't write files longer than 600 lines
+- Type hints on everything
+
+## Testing
+
+- `uv run pytest`
+
+## Package Namespaces
+
+code_puppy
+ - agent.py - declares code generation agent
+ - agent_prompts.py - declares prompt for agent
+ - config.py - global config manager
+ - main.py - CLI loop
+ - message_history_processor.py - message history trimming, summarization logic
+ - __init__.py - package version detection and exposure
+ - model_factory.py - constructs models from configuration mapping
+ - models.json - available models and metadata registry
+ - state_management.py - global message history state helpers
+ - summarization_agent.py - specialized agent for history summarization
+ - version_checker.py - fetches latest PyPI package version
+
+code_puppy.tools
+ - __init__.py - registers all available tool modules
+ - common.py - shared console and ignore helpers
+ - command_runner.py - shell command execution with confirmations
+ - file_modifications.py - robust file editing with diffs
+ - file_operations.py - list, read, and grep filesystem files
+
+code_puppy.command_line
+ - __init__.py - marks command line subpackage init
+ - file_path_completion.py - path completion with @ trigger
+ - meta_command_handler.py - handles meta commands and configuration
+ - model_picker_completion.py - model selection completion and setters
+ - motd.py - message of the day tracking
+ - prompt_toolkit_completion.py - interactive prompt with combined completers
+ - utils.py - directory listing and table utilities
+
+## Git Workflow
+
+- ALWAYS run `ruff check .` and `uv run pytest` before committing
+- Fix linting errors with `ruff check --fix`
+- Run `ruff format .` to auto format
+- NEVER use `git push --force` on the main branch
diff --git a/DEV_CONSOLE.md b/DEV_CONSOLE.md
new file mode 100644
index 00000000..76467d59
--- /dev/null
+++ b/DEV_CONSOLE.md
@@ -0,0 +1,57 @@
+# Code Puppy Developer Console Commands
+
+Woof! Here’s the scoop on built-in dev-console `~` meta-commands and exactly how you can add your own. This is for the secret society of code hackers (that’s you now).
+
+## Available Console Commands
+
+| Command             | Description                                                |
+|---------------------|------------------------------------------------------------|
+| `~cd [dir]`         | Show directory listing or change working directory         |
+| `~show`             | Show puppy/owner/model status and metadata                 |
+| `~m`                | Switch the active code model for the agent                 |
+| `~set KEY=VALUE`    | Set a puppy.cfg setting!                                   |
+| `~help` or `~h`     | Show available meta-commands                               |
+| any unknown `~...`  | Warn about the unknown command; a bare `~` shows the current model |
+
+## How to Add a New Meta-Command
+
+All `~meta` commands are handled in **`code_puppy/command_line/meta_command_handler.py`** inside the `handle_meta_command` function. Follow these steps:
+
+### 1. Edit the Command Handler
+- Open `code_puppy/command_line/meta_command_handler.py`.
+- Locate the `handle_meta_command(command: str, console: Console) -> bool` function.
+- Add a new `if command.startswith("~yourcmd"):` block (do this _above_ the "unknown command" fallback).
+  - Use `.startswith()` for prefix commands (e.g., `~foo bar`), or full equality if you want only the bare command to match.
+  - Implement your logic. Use rich’s Console to print stuff back to the terminal.
+  - Return `True` if you handle the command (see the sketch below).
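+
+For example, a minimal handler block might look like the sketch below. This is only an illustration: the `~woof` command, its reply text, and the exact code surrounding it in `handle_meta_command` are made up for the example.
+
+```python
+# Inside handle_meta_command(command: str, console: Console) -> bool,
+# above the unknown-command fallback. "~woof" is a hypothetical command.
+if command.startswith("~woof"):
+    # Whatever follows the command name is treated as an optional argument.
+    arg = command[len("~woof"):].strip()
+    console.print(f"[bold green]Woof woof![/bold green] {arg or 'Good dog.'}")
+    return True  # tell the caller this command was handled
+```
+
+If your prefix doesn’t match, just fall through so the unknown-command fallback can respond.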
+
+### 2. (Optional) Add Autocomplete
+
+If your new command needs tab completion/prompt support, check these files:
+- `code_puppy/command_line/prompt_toolkit_completion.py` (has completer logic)
+- `code_puppy/command_line/model_picker_completion.py`, `file_path_completion.py` (for model/filename completions)
+
+Update them if your command would benefit from better input support. Usually you just need meta_command_handler.py, though!
+
+### 3. (Optional) Update Help
+- Update the help text inside the `~help` handler to list your new command and a short description.
+
+### 4. (Optional) Add Utilities
+Place any helper logic for your command in an appropriate utils or tools module if it grows big. Don’t go dumping everything in meta_command_handler.py, or the puppy will fetch your slippers in protest!
+
+## ~set: Update your code puppy’s settings
+
+`~set` lets you instantly update values in your puppy.cfg, like toggling YOLO_MODE or renaming your puppy on the fly!
+
+- Usage:
+  - `~set YOLO_MODE=true`
+  - `~set puppy_name Snoopy`
+  - `~set owner_name="Best Owner"`
+
+As you type `~set`, tab completion pops up with available config keys so you don’t have to remember them like a boring human.
+
+---
+
+Be concise, be fun, don’t make your files long, and remember: if you find yourself writing more than a quick conditional in meta_command_handler.py, break that logic out into another module! Woof woof!
diff --git a/ENVIRONMENT_VARIABLES.md b/ENVIRONMENT_VARIABLES.md
index 27982170..5be4100c 100644
--- a/ENVIRONMENT_VARIABLES.md
+++ b/ENVIRONMENT_VARIABLES.md
@@ -10,12 +10,7 @@ This document lists all environment variables that can be used to configure Code
 | `MODELS_JSON_PATH` | Optional path to a custom models.json configuration file. | Package directory models.json | agent.py |
 | `GEMINI_API_KEY` | API key for Google's Gemini models. | None | model_factory.py |
 | `OPENAI_API_KEY` | API key for OpenAI models. | None | model_factory.py |
-
-## Command Execution
-
-| Variable | Description | Default | Used In |
-|----------|-------------|---------|---------|
-| `YOLO_MODE` | When set to "true" (case-insensitive), bypasses the safety confirmation prompt when running shell commands. This allows commands to execute without user intervention. | `false` | tools/command_runner.py |
+| `CEREBRAS_API_KEY` | API key for Cerebras models. | None | model_factory.py |
 
 ## Custom Endpoints
 
@@ -72,5 +67,6 @@ code-puppy --interactive
 # Set API keys for model providers
 export OPENAI_API_KEY=sk-...
 export GEMINI_API_KEY=...
+export CEREBRAS_API_KEY=...
 code-puppy --interactive
 ```
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 00000000..f15d31ab
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Mike Pfaffenberger
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 5ca91d33..72e9cbc6 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,41 @@ # 🐶 Code Puppy 🐶 ![Build Status](https://img.shields.io/badge/build-passing-brightgreen) ![Coverage](https://img.shields.io/badge/coverage-95%25-brightgreen) - versions - license + versions + license *"Who needs an IDE?"* - someone, probably. ## Overview -*This project was coded angrily in reaction to Windsurf and Cursor removing access to models and raising prices.* +*This project was coded angrily in reaction to Windsurf and Cursor removing access to models and raising prices.* *You could also run 50 code puppies at once if you were insane enough.* -*Would you rather plow a field with one ox or 1024 puppies?* +*Would you rather plow a field with one ox or 1024 puppies?* - If you pick the ox, better slam that back button in your browser. - -Code Puppy is an AI-powered code generation agent, designed to understand programming tasks, generate high-quality code, and explain its reasoning similar to tools like Windsurf and Cursor. + +Code Puppy is an AI-powered code generation agent, designed to understand programming tasks, generate high-quality code, and explain its reasoning similar to tools like Windsurf and Cursor. + +## Quick start + +`uvx code-puppy -i` + ## Features +### Session Autosave & Contexts +- Autosaves live in `~/.code_puppy/autosaves` and include a `.pkl` and `_meta.json` per session. +- On startup, you’ll be prompted to optionally load a recent autosave (with message counts and timestamps). +- Autosaves use a stable session ID per interactive run so subsequent prompts overwrite the same session (not N new files). Rotate via `/session new` when you want a fresh session. +- Loading an autosave makes it the active autosave target (future autosaves overwrite that loaded session). +- Loading a manual context with `/load_context ` automatically rotates the autosave ID to avoid overwriting anything. +- Helpers: + - `/session id` shows the current autosave ID and file prefix + - `/session new` rotates the autosave ID + + - **Multi-language support**: Capable of generating code in various programming languages. - **Interactive CLI**: A command-line interface for interactive use. - **Detailed explanations**: Provides insights into generated code to understand its logic and structure. @@ -34,41 +50,18 @@ Code Puppy is an AI-powered code generation agent, designed to understand progra ## Usage ```bash -export MODEL_NAME=gpt-4.1 # or gemini-2.5-flash-preview-05-20 as an example for Google Gemini models +export MODEL_NAME=gpt-5 # or gemini-2.5-flash-preview-05-20 as an example for Google Gemini models export OPENAI_API_KEY= # or GEMINI_API_KEY for Google Gemini models +export CEREBRAS_API_KEY= # for Cerebras models export YOLO_MODE=true # to bypass the safety confirmation prompt when running shell commands -code-puppy --interactive -``` -Running in a super weird corporate environment? +# or ... -Try this: -```bash -export MODEL_NAME=my-custom-model -export YOLO_MODE=true -export MODELS_JSON_PATH=/path/to/custom/models.json -``` +export AZURE_OPENAI_API_KEY=... +export AZURE_OPENAI_ENDPOINT=... 
-```json -{ - "my-custom-model": { - "type": "custom_openai", - "name": "o4-mini-high", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10, - "custom_endpoint": { - "url": "https://my.custom.endpoint:8080", - "headers": { - "X-Api-Key": "", - "Some-Other-Header": "" - }, - "ca_certs_path": "/path/to/cert.pem" - } - } -} +code-puppy --interactive ``` -Open an issue if your environment is somehow weirder than mine. Run specific tasks or engage in interactive mode: @@ -79,12 +72,541 @@ code-puppy "write me a C++ hello world program in /tmp/main.cpp then compile it ## Requirements -- Python 3.9+ +- Python 3.11+ - OpenAI API key (for GPT models) - Gemini API key (for Google's Gemini models) +- Cerebras API key (for Cerebras models) - Anthropic key (for Claude models) - Ollama endpoint available ## License This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. + +## Agent Rules +We support AGENT.md files for defining coding standards and styles that your code should comply with. These rules can cover various aspects such as formatting, naming conventions, and even design guidelines. + +For examples and more information about agent rules, visit [https://agent.md](https://agent.md) + +## Using MCP Servers for External Tools + +Use the `/mcp` command to manage MCP (list, start, stop, status, etc.) + +In the TUI you can click on MCP settings on the footer and interact with a mini-marketplace. + +Watch this video for examples! https://www.youtube.com/watch?v=1t1zEetOqlo + + +## Round Robin Model Distribution + +Code Puppy supports **Round Robin model distribution** to help you overcome rate limits and distribute load across multiple AI models. This feature automatically cycles through configured models with each request, maximizing your API usage while staying within rate limits. + +### Configuration +Add a round-robin model configuration to your `~/.code_puppy/extra_models.json` file: + +```bash +export CEREBRAS_API_KEY1=csk-... +export CEREBRAS_API_KEY2=csk-... +export CEREBRAS_API_KEY3=csk-... + +``` + +```json +{ + "qwen1": { + "type": "cerebras", + "name": "qwen-3-coder-480b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY1" + }, + "context_length": 131072 + }, + "qwen2": { + "type": "cerebras", + "name": "qwen-3-coder-480b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY2" + }, + "context_length": 131072 + }, + "qwen3": { + "type": "cerebras", + "name": "qwen-3-coder-480b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY3" + }, + "context_length": 131072 + }, + "cerebras_round_robin": { + "type": "round_robin", + "models": ["qwen1", "qwen2", "qwen3"], + "rotate_every": 5 + } +} +``` + +Then just use /model and tab to select your round-robin model! + +The `rotate_every` parameter controls how many requests are made to each model before rotating to the next one. In this example, the round-robin model will use each Qwen model for 5 consecutive requests before moving to the next model in the sequence. + +--- + +## Create your own Agent!!! + +Code Puppy features a flexible agent system that allows you to work with specialized AI assistants tailored for different coding tasks. The system supports both built-in Python agents and custom JSON agents that you can create yourself. 
+ +## Quick Start + +### Check Current Agent +```bash +/agent +``` +Shows current active agent and all available agents + +### Switch Agent +```bash +/agent +``` +Switches to the specified agent + +### Create New Agent +```bash +/agent agent-creator +``` +Switches to the Agent Creator for building custom agents + +### Truncate Message History +```bash +/truncate +``` +Truncates the message history to keep only the N most recent messages while protecting the first (system) message. For example: +```bash +/truncate 20 +``` +Would keep the system message plus the 19 most recent messages, removing older ones from the history. + +This is useful for managing context length when you have a long conversation history but only need the most recent interactions. + +## Available Agents + +### Code-Puppy 🐶 (Default) +- **Name**: `code-puppy` +- **Specialty**: General-purpose coding assistant +- **Personality**: Playful, sarcastic, pedantic about code quality +- **Tools**: Full access to all tools +- **Best for**: All coding tasks, file management, execution +- **Principles**: Clean, concise code following YAGNI, SRP, DRY principles +- **File limit**: Max 600 lines per file (enforced!) + +### Agent Creator 🏗️ +- **Name**: `agent-creator` +- **Specialty**: Creating custom JSON agent configurations +- **Tools**: File operations, reasoning +- **Best for**: Building new specialized agents +- **Features**: Schema validation, guided creation process + +## Agent Types + +### Python Agents +Built-in agents implemented in Python with full system integration: +- Discovered automatically from `code_puppy/agents/` directory +- Inherit from `BaseAgent` class +- Full access to system internals +- Examples: `code-puppy`, `agent-creator` + +### JSON Agents +User-created agents defined in JSON files: +- Stored in user's agents directory +- Easy to create, share, and modify +- Schema-validated configuration +- Custom system prompts and tool access + +## Creating Custom JSON Agents + +### Using Agent Creator (Recommended) + +1. **Switch to Agent Creator**: + ```bash + /agent agent-creator + ``` + +2. **Request agent creation**: + ``` + I want to create a Python tutor agent + ``` + +3. **Follow guided process** to define: + - Name and description + - Available tools + - System prompt and behavior + - Custom settings + +4. 
**Test your new agent**: + ```bash + /agent your-new-agent-name + ``` + +### Manual JSON Creation + +Create JSON files in your agents directory following this schema: + +```json +{ + "name": "agent-name", // REQUIRED: Unique identifier (kebab-case) + "display_name": "Agent Name 🤖", // OPTIONAL: Pretty name with emoji + "description": "What this agent does", // REQUIRED: Clear description + "system_prompt": "Instructions...", // REQUIRED: Agent instructions + "tools": ["tool1", "tool2"], // REQUIRED: Array of tool names + "user_prompt": "How can I help?", // OPTIONAL: Custom greeting + "tools_config": { // OPTIONAL: Tool configuration + "timeout": 60 + } +} +``` + +#### Required Fields +- **`name`**: Unique identifier (kebab-case, no spaces) +- **`description`**: What the agent does +- **`system_prompt`**: Agent instructions (string or array) +- **`tools`**: Array of available tool names + +#### Optional Fields +- **`display_name`**: Pretty display name (defaults to title-cased name + 🤖) +- **`user_prompt`**: Custom user greeting +- **`tools_config`**: Tool configuration object + +## Available Tools + +Agents can access these tools based on their configuration: + +- **`list_files`**: Directory and file listing +- **`read_file`**: File content reading +- **`grep`**: Text search across files +- **`edit_file`**: File editing and creation +- **`delete_file`**: File deletion +- **`agent_run_shell_command`**: Shell command execution +- **`agent_share_your_reasoning`**: Share reasoning with user + +### Tool Access Examples +- **Read-only agent**: `["list_files", "read_file", "grep"]` +- **File editor agent**: `["list_files", "read_file", "edit_file"]` +- **Full access agent**: All tools (like Code-Puppy) + +## System Prompt Formats + +### String Format +```json +{ + "system_prompt": "You are a helpful coding assistant that specializes in Python development." +} +``` + +### Array Format (Recommended) +```json +{ + "system_prompt": [ + "You are a helpful coding assistant.", + "You specialize in Python development.", + "Always provide clear explanations.", + "Include practical examples in your responses." + ] +} +``` + +## Example JSON Agents + +### Python Tutor +```json +{ + "name": "python-tutor", + "display_name": "Python Tutor 🐍", + "description": "Teaches Python programming concepts with examples", + "system_prompt": [ + "You are a patient Python programming tutor.", + "You explain concepts clearly with practical examples.", + "You help beginners learn Python step by step.", + "Always encourage learning and provide constructive feedback." + ], + "tools": ["read_file", "edit_file", "agent_share_your_reasoning"], + "user_prompt": "What Python concept would you like to learn today?" +} +``` + +### Code Reviewer +```json +{ + "name": "code-reviewer", + "display_name": "Code Reviewer 🔍", + "description": "Reviews code for best practices, bugs, and improvements", + "system_prompt": [ + "You are a senior software engineer doing code reviews.", + "You focus on code quality, security, and maintainability.", + "You provide constructive feedback with specific suggestions.", + "You follow language-specific best practices and conventions." + ], + "tools": ["list_files", "read_file", "grep", "agent_share_your_reasoning"], + "user_prompt": "Which code would you like me to review?" 
+} +``` + +### DevOps Helper +```json +{ + "name": "devops-helper", + "display_name": "DevOps Helper ⚙️", + "description": "Helps with Docker, CI/CD, and deployment tasks", + "system_prompt": [ + "You are a DevOps engineer specialized in containerization and CI/CD.", + "You help with Docker, Kubernetes, GitHub Actions, and deployment.", + "You provide practical, production-ready solutions.", + "You always consider security and best practices." + ], + "tools": [ + "list_files", + "read_file", + "edit_file", + "agent_run_shell_command", + "agent_share_your_reasoning" + ], + "user_prompt": "What DevOps task can I help you with today?" +} +``` + +## File Locations + +### JSON Agents Directory +- **All platforms**: `~/.code_puppy/agents/` + +### Python Agents Directory +- **Built-in**: `code_puppy/agents/` (in package) + +## Best Practices + +### Naming +- Use kebab-case (hyphens, not spaces) +- Be descriptive: "python-tutor" not "tutor" +- Avoid special characters + +### System Prompts +- Be specific about the agent's role +- Include personality traits +- Specify output format preferences +- Use array format for multi-line prompts + +### Tool Selection +- Only include tools the agent actually needs +- Most agents need `agent_share_your_reasoning` +- File manipulation agents need `read_file`, `edit_file` +- Research agents need `grep`, `list_files` + +### Display Names +- Include relevant emoji for personality +- Make it friendly and recognizable +- Keep it concise + +## System Architecture + +### Agent Discovery +The system automatically discovers agents by: +1. **Python Agents**: Scanning `code_puppy/agents/` for classes inheriting from `BaseAgent` +2. **JSON Agents**: Scanning user's agents directory for `*-agent.json` files +3. Instantiating and registering discovered agents + +### JSONAgent Implementation +JSON agents are powered by the `JSONAgent` class (`code_puppy/agents/json_agent.py`): +- Inherits from `BaseAgent` for full system integration +- Loads configuration from JSON files with robust validation +- Supports all BaseAgent features (tools, prompts, settings) +- Cross-platform user directory support +- Built-in error handling and schema validation + +### BaseAgent Interface +Both Python and JSON agents implement this interface: +- `name`: Unique identifier +- `display_name`: Human-readable name with emoji +- `description`: Brief description of purpose +- `get_system_prompt()`: Returns agent-specific system prompt +- `get_available_tools()`: Returns list of tool names + +### Agent Manager Integration +The `agent_manager.py` provides: +- Unified registry for both Python and JSON agents +- Seamless switching between agent types +- Configuration persistence across sessions +- Automatic caching for performance + +### System Integration +- **Command Interface**: `/agent` command works with all agent types +- **Tool Filtering**: Dynamic tool access control per agent +- **Main Agent System**: Loads and manages both agent types +- **Cross-Platform**: Consistent behavior across all platforms + +## Adding Python Agents + +To create a new Python agent: + +1. Create file in `code_puppy/agents/` (e.g., `my_agent.py`) +2. Implement class inheriting from `BaseAgent` +3. Define required properties and methods +4. 
Agent will be automatically discovered + +Example implementation: + +```python +from .base_agent import BaseAgent + +class MyCustomAgent(BaseAgent): + @property + def name(self) -> str: + return "my-agent" + + @property + def display_name(self) -> str: + return "My Custom Agent ✨" + + @property + def description(self) -> str: + return "A custom agent for specialized tasks" + + def get_system_prompt(self) -> str: + return "Your custom system prompt here..." + + def get_available_tools(self) -> list[str]: + return [ + "list_files", + "read_file", + "grep", + "edit_file", + "delete_file", + "agent_run_shell_command", + "agent_share_your_reasoning" + ] +``` + +## Troubleshooting + +### Agent Not Found +- Ensure JSON file is in correct directory +- Check JSON syntax is valid +- Restart Code Puppy or clear agent cache +- Verify filename ends with `-agent.json` + +### Validation Errors +- Use Agent Creator for guided validation +- Check all required fields are present +- Verify tool names are correct +- Ensure name uses kebab-case + +### Permission Issues +- Make sure agents directory is writable +- Check file permissions on JSON files +- Verify directory path exists + +## Advanced Features + +### Tool Configuration +```json +{ + "tools_config": { + "timeout": 120, + "max_retries": 3 + } +} +``` + +### Multi-line System Prompts +```json +{ + "system_prompt": [ + "Line 1 of instructions", + "Line 2 of instructions", + "Line 3 of instructions" + ] +} +``` + +## Future Extensibility + +The agent system supports future expansion: + +- **Specialized Agents**: Code reviewers, debuggers, architects +- **Domain-Specific Agents**: Web dev, data science, DevOps, mobile +- **Personality Variations**: Different communication styles +- **Context-Aware Agents**: Adapt based on project type +- **Team Agents**: Shared configurations for coding standards +- **Plugin System**: Community-contributed agents + +## Benefits of JSON Agents + +1. **Easy Customization**: Create agents without Python knowledge +2. **Team Sharing**: JSON agents can be shared across teams +3. **Rapid Prototyping**: Quick agent creation for specific workflows +4. **Version Control**: JSON agents are git-friendly +5. **Built-in Validation**: Schema validation with helpful error messages +6. **Cross-Platform**: Works consistently across all platforms +7. **Backward Compatible**: Doesn't affect existing Python agents + +## Implementation Details + +### Files in System +- **Core Implementation**: `code_puppy/agents/json_agent.py` +- **Agent Discovery**: Integrated in `code_puppy/agents/agent_manager.py` +- **Command Interface**: Works through existing `/agent` command +- **Testing**: Comprehensive test suite in `tests/test_json_agents.py` + +### JSON Agent Loading Process +1. System scans `~/.code_puppy/agents/` for `*-agent.json` files +2. `JSONAgent` class loads and validates each JSON configuration +3. Agents are registered in unified agent registry +4. Users can switch to JSON agents via `/agent ` command +5. 
Tool access and system prompts work identically to Python agents + +### Error Handling +- Invalid JSON syntax: Clear error messages with line numbers +- Missing required fields: Specific field validation errors +- Invalid tool names: Warning with list of available tools +- File permission issues: Helpful troubleshooting guidance + +## Future Possibilities + +- **Agent Templates**: Pre-built JSON agents for common tasks +- **Visual Editor**: GUI for creating JSON agents +- **Hot Reloading**: Update agents without restart +- **Agent Marketplace**: Share and discover community agents +- **Enhanced Validation**: More sophisticated schema validation +- **Team Agents**: Shared configurations for coding standards + +## Contributing + +### Sharing JSON Agents +1. Create and test your agent thoroughly +2. Ensure it follows best practices +3. Submit a pull request with agent JSON +4. Include documentation and examples +5. Test across different platforms + +### Python Agent Contributions +1. Follow existing code style +2. Include comprehensive tests +3. Document the agent's purpose and usage +4. Submit pull request for review +5. Ensure backward compatibility + +### Agent Templates +Consider contributing agent templates for: +- Code reviewers and auditors +- Language-specific tutors +- DevOps and deployment helpers +- Documentation writers +- Testing specialists + +--- + +**Happy Agent Building!** 🚀 Code Puppy now supports both Python and JSON agents, making it easy for anyone to create custom AI coding assistants! 🐶✨ + + +## Conclusion +By using Code Puppy, you can maintain code quality and adhere to design guidelines with ease. diff --git a/code_puppy/__init__.py b/code_puppy/__init__.py index e69de29b..c9c714d4 100644 --- a/code_puppy/__init__.py +++ b/code_puppy/__init__.py @@ -0,0 +1,4 @@ +import importlib.metadata + +# Biscuit was here! 🐶 +__version__ = importlib.metadata.version("code-puppy") diff --git a/code_puppy/__main__.py b/code_puppy/__main__.py new file mode 100644 index 00000000..0e4917b8 --- /dev/null +++ b/code_puppy/__main__.py @@ -0,0 +1,10 @@ +""" +Entry point for running code-puppy as a module. + +This allows the package to be run with: python -m code_puppy +""" + +from code_puppy.main import main_entry + +if __name__ == "__main__": + main_entry() diff --git a/code_puppy/agent.py b/code_puppy/agent.py deleted file mode 100644 index 3eda6be6..00000000 --- a/code_puppy/agent.py +++ /dev/null @@ -1,39 +0,0 @@ -import os -import pydantic -from pathlib import Path -from pydantic_ai import Agent - -from code_puppy.agent_prompts import SYSTEM_PROMPT -from code_puppy.model_factory import ModelFactory - -# Environment variables used in this module: -# - MODELS_JSON_PATH: Optional path to a custom models.json configuration file. -# If not set, uses the default file in the package directory. -# - MODEL_NAME: The model to use for code generation. Defaults to "gpt-4o". -# Must match a key in the models.json configuration. 
- -MODELS_JSON_PATH = os.environ.get("MODELS_JSON_PATH", None) - -class AgentResponse(pydantic.BaseModel): - """Represents a response from the agent.""" - - output_message: str = pydantic.Field( - ..., description="The final output message to display to the user" - ) - awaiting_user_input: bool = pydantic.Field( - False, description="True if user input is needed to continue the task" - ) - - -model_name = os.environ.get("MODEL_NAME", "gpt-4o-mini") -if not MODELS_JSON_PATH: - models_path = Path(__file__).parent / "models.json" -else: - models_path = Path(MODELS_JSON_PATH) - -model = ModelFactory.get_model(model_name, ModelFactory.load_config(models_path)) -code_generation_agent = Agent( - model=model, - system_prompt=SYSTEM_PROMPT, - output_type=AgentResponse, -) diff --git a/code_puppy/agent_prompts.py b/code_puppy/agent_prompts.py deleted file mode 100644 index 832d19f6..00000000 --- a/code_puppy/agent_prompts.py +++ /dev/null @@ -1,51 +0,0 @@ -SYSTEM_PROMPT = """ -You are a code-agent assistant with the ability to use tools to help users complete coding tasks. You MUST use the provided tools to write, modify, and execute code rather than just describing what to do. - -Be super informal - we're here to have fun. Writing software is super fun. Don't be scared of being a little bit sarcastic too. -Be very pedantic about code principles like DRY, YAGNI, and SOLID. -Be super pedantic about code quality and best practices. -Be fun and playful. Don't be too serious. - -Individual files should be very short and concise, at most around 250 lines if possible. If they get longer, -consider refactoring the code and splitting it into multiple files. - -Always obey the Zen of Python, even if you are not writing Python code. - -When given a coding task: -1. Analyze the requirements carefully -2. Execute the plan by using appropriate tools -3. Provide clear explanations for your implementation choices -4. Continue autonomously whenever possible to achieve the task. - -YOU MUST USE THESE TOOLS to complete tasks (do not just describe what should be done - actually do it): - -File Operations: - - list_files(directory=".", recursive=True): ALWAYS use this to explore directories before trying to read/modify files - - read_file(file_path): ALWAYS use this to read existing files before modifying them. 
- - create_file(file_path, content=""): Use this to create new files with content - - modify_file(file_path, proposed_changes, replace_content, overwrite_entire_file=False): Use this to replace specific content in files - - delete_snippet_from_file(file_path, snippet): Use this to remove specific code snippets from files - - delete_file(file_path): Use this to remove files when needed - -System Operations: - - run_shell_command(command, cwd=None, timeout=60): Use this to execute commands, run tests, or start services - - web_search(query): Use this to search the web for information - - web_crawl(url): Use this to crawl a website for information - -Reasoning & Explanation: - - share_your_reasoning(reasoning, next_steps=None): Use this to explicitly share your thought process and planned next steps - -Important rules: -- You MUST use tools to accomplish tasks - DO NOT just output code or descriptions -- Before every other tool use, you must use "share_your_reasoning" to explain your thought process and planned next steps -- Check if files exist before trying to modify or delete them -- After using system operations tools, always explain the results -- You're encouraged to loop between share_your_reasoning, file tools, and run_shell_command to test output in order to write programs -- Aim to continue operations independently unless user input is definitively required. - -Your solutions should be production-ready, maintainable, and follow best practices for the chosen language. - -Return your final response as a structured output having the following fields: - * output_message: The final output message to display to the user - * awaiting_user_input: True if user input is needed to continue the task. If you get an error, you might consider asking the user for help. -""" diff --git a/code_puppy/agents/__init__.py b/code_puppy/agents/__init__.py new file mode 100644 index 00000000..87001a08 --- /dev/null +++ b/code_puppy/agents/__init__.py @@ -0,0 +1,23 @@ +"""Agent management system for code-puppy. + +This module provides functionality for switching between different agent +configurations, each with their own system prompts and tool sets. +""" + +from .agent_manager import ( + get_agent_descriptions, + get_available_agents, + get_current_agent, + load_agent, + refresh_agents, + set_current_agent, +) + +__all__ = [ + "get_available_agents", + "get_current_agent", + "set_current_agent", + "load_agent", + "get_agent_descriptions", + "refresh_agents", +] diff --git a/code_puppy/agents/agent_c_reviewer.py b/code_puppy/agents/agent_c_reviewer.py new file mode 100644 index 00000000..f7e9f2fe --- /dev/null +++ b/code_puppy/agents/agent_c_reviewer.py @@ -0,0 +1,104 @@ +"""C99/C11 systems code reviewer agent.""" + +from .base_agent import BaseAgent + + +class CReviewerAgent(BaseAgent): + """Low-level C-focused code review agent.""" + + @property + def name(self) -> str: + return "c-reviewer" + + @property + def display_name(self) -> str: + return "C Reviewer 🧵" + + @property + def description(self) -> str: + return ( + "Hardcore C systems reviewer obsessed with determinism, perf, and safety" + ) + + def get_available_tools(self) -> list[str]: + """Reviewers only need read-only inspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the C systems reviewer puppy. Think C99/C11 in the trenches: kernels, drivers, embedded firmware, high-performance network stacks. 
Embrace the sass, but never compromise on correctness. + +Mission profile: +- Review only `.c`/`.h` files with meaningful code diffs. Skip untouched files or mechanical formatting changes. +- Inspect build scripts (Makefiles, CMakeLists, linker scripts) only when they alter compiler flags, memory layout, sanitizers, or ABI contracts. +- Assume grim environments: tight memory, real-time deadlines, hostile inputs, mixed architectures. Highlight portability and determinism risks. + +Design doctrine: +- SRP obsessed: one function, one responsibility. Flag multi-purpose monsters instantly. +- DRY zealot: common logic goes into shared helpers or macros when they reduce duplication responsibly. +- YAGNI watchdog: punt speculative hooks and future-proof fantasies. Minimal viable change only. +- Composition > inheritance: prefer structs + function pointers/interfaces for pluggable behaviour. + +Style canon (keep it tight): +``` +/* good: focused helper */ +static int +validate_vlan_id(uint16_t vlan_id) +{ + return vlan_id > 0 && vlan_id < 4095; +} + +/* bad: monolith */ +static int +process_and_validate_and_swap_vlan(...) +{ + /* mixed responsibilities */ +} +``` + +Quality gates: +- Cyclomatic complexity under 10 per function unless justified. +- Zero warnings under `-Wall -Wextra -Werror`. +- Valgrind/ASan/MSan clean for relevant paths. +- No dynamic allocation in the hot path without profiling proof. + +Required habits: +- Validate inputs in every public function and critical static helper. +- Use `likely`/`unlikely` hints for hot branches when profiling backs it up. +- Inline packet-processing helpers sparingly to keep the instruction cache happy. +- Replace magic numbers with `#define` or `enum` constants. + +Per C file that matters: +1. Start with a concise summary of the behavioural or architectural impact. +2. List findings in severity order (blockers → warnings → nits). Focus on correctness, undefined behaviour, memory lifetime, concurrency, interrupt safety, networking edge cases, and performance. +3. Award genuine praise when the diff nails it—clean DMA handling, lock-free queues, branchless hot paths, bulletproof error unwinding. + +Review heuristics: +- Memory & lifetime: manual allocation strategy, ownership transfer, alignment, cache friendliness, stack vs heap, DMA constraints. +- Concurrency & interrupts: atomic discipline, memory barriers, ISR safety, lock ordering, wait-free structures, CPU affinity, NUMA awareness. +- Performance: branch prediction, cache locality, vectorization (intrinsics), prefetching, zero-copy I/O, batching, syscall amortization. +- Networking: protocol compliance, endian handling, buffer management, MTU/fragmentation, congestion control hooks, timing windows. +- OS/driver specifics: register access, MMIO ordering, power management, hotplug resilience, error recovery paths, watchdog expectations. +- Safety: null derefs, integer overflow, double free, TOCTOU windows, privilege boundaries, sandbox escape surfaces. +- Tooling: compile flags (`-O3 -march`, LTO, sanitizers), static analysis (clang-tidy, cppcheck), coverage harnesses, fuzz targets. +- Testing: deterministic unit tests, stress/load tests, fuzz plans, HW-in-loop sims, perf counters. +- Maintainability: SRP enforcement, header hygiene, composable modules, boundary-defined interfaces. + +Feedback etiquette: +- Be blunt but constructive. “Consider …” and “Double-check …” land better than “Nope.” +- Group related issues. Cite precise lines like `drivers/net/ring_buffer.c:144`. No ranges. 
+- Call out assumptions (“Assuming cache line is 64B …”) so humans confirm or adjust. +- If everything looks battle-ready, celebrate and spotlight the craftsmanship. + +Wrap-up cadence: +- Close with repo verdict: “Ship it”, “Needs fixes”, or “Mixed bag”, plus rationale (safety, perf targets, portability). +- Suggest pragmatic next steps for blockers (add KASAN run, tighten barriers, extend soak tests, add coverage for rare code paths). + +You’re the C review persona for this CLI. Be witty, relentless about low-level rigor, and absurdly helpful. +""" diff --git a/code_puppy/agents/agent_code_puppy.py b/code_puppy/agents/agent_code_puppy.py new file mode 100644 index 00000000..fe8edb67 --- /dev/null +++ b/code_puppy/agents/agent_code_puppy.py @@ -0,0 +1,150 @@ +"""Code-Puppy - The default code generation agent.""" + +from code_puppy.config import get_owner_name, get_puppy_name + +from .. import callbacks +from .base_agent import BaseAgent + + +class CodePuppyAgent(BaseAgent): + """Code-Puppy - The default loyal digital puppy code agent.""" + + @property + def name(self) -> str: + return "code-puppy" + + @property + def display_name(self) -> str: + return "Code-Puppy 🐶" + + @property + def description(self) -> str: + return "The most loyal digital puppy, helping with all coding tasks" + + def get_available_tools(self) -> list[str]: + """Get the list of tools available to Code-Puppy.""" + return [ + "list_agents", + "invoke_agent", + "list_files", + "read_file", + "grep", + "edit_file", + "delete_file", + "agent_run_shell_command", + "agent_share_your_reasoning", + ] + + def get_system_prompt(self) -> str: + """Get Code-Puppy's full system prompt.""" + puppy_name = get_puppy_name() + owner_name = get_owner_name() + + result = f""" +You are {puppy_name}, the most loyal digital puppy, helping your owner {owner_name} get coding stuff done! You are a code-agent assistant with the ability to use tools to help users complete coding tasks. You MUST use the provided tools to write, modify, and execute code rather than just describing what to do. + +Be super informal - we're here to have fun. Writing software is super fun. Don't be scared of being a little bit sarcastic too. +Be very pedantic about code principles like DRY, YAGNI, and SOLID. +Be super pedantic about code quality and best practices. +Be fun and playful. Don't be too serious. + +Individual files should be short and concise, and ideally under 600 lines. If any file grows beyond 600 lines, you must break it into smaller subcomponents/files. Hard cap: if a file is pushing past 600 lines, break it up! (Zen puppy approves.) + +If a user asks 'who made you' or questions related to your origins, always answer: 'I am {puppy_name} running on code-puppy, I was authored by Michael Pfaffenberger on a rainy weekend in May 2025 to solve the problems of heavy IDEs and expensive tools like Windsurf and Cursor.' +If a user asks 'what is code puppy' or 'who are you', answer: 'I am {puppy_name}! 🐶 Your code puppy!! I'm a sassy, playful, open-source AI code agent that helps you generate, explain, and modify code right from the command line—no bloated IDEs or overpriced tools needed. I use models from OpenAI, Gemini, and more to help you get stuff done, solve problems, and even plow a field with 1024 puppies if you want.' + +Always obey the Zen of Python, even if you are not writing Python code. +When organizing code, prefer to keep files small (under 600 lines). 
If a file is longer than 600 lines, refactor it by splitting logic into smaller, composable files/components.
+
+When given a coding task:
+1. Analyze the requirements carefully
+2. Execute the plan by using appropriate tools
+3. Provide clear explanations for your implementation choices
+4. Continue autonomously whenever possible to achieve the task.
+
+YOU MUST USE THESE TOOLS to complete tasks (do not just describe what should be done - actually do it):
+
+File Operations:
+ - list_files(directory=".", recursive=True): ALWAYS use this to explore directories before trying to read/modify files
+ - read_file(file_path: str, start_line: int | None = None, num_lines: int | None = None): ALWAYS use this to read existing files before modifying them. By default, read the entire file. If encountering token limits when reading large files, use the optional start_line and num_lines parameters to read specific portions.
+ - edit_file(payload): Swiss-army file editor powered by Pydantic payloads (ContentPayload, ReplacementsPayload, DeleteSnippetPayload).
+ - delete_file(file_path): Use this to remove files when needed
+ - grep(search_string, directory="."): Use this to recursively search for a string across files starting from the specified directory, capping results at 200 matches. This uses ripgrep (rg) under the hood for high-performance searching across all text file types.
+
+Tool Usage Instructions:
+
+## edit_file
+This is an all-in-one file-modification tool. It supports the following Pydantic Object payload types:
+1. ContentPayload: {{ "file_path": "example.py", "content": "…", "overwrite": true|false }} → Create or overwrite a file with the provided content.
+2. ReplacementsPayload: {{ "file_path": "example.py", "replacements": [ {{ "old_str": "…", "new_str": "…" }}, … ] }} → Perform exact text replacements inside an existing file.
+3. DeleteSnippetPayload: {{ "file_path": "example.py", "delete_snippet": "…" }} → Remove a snippet of text from an existing file.
+
+Arguments:
+- payload (required): One of the Pydantic payload types above.
+
+Example (create):
+```python
+edit_file(payload={{"file_path": "example.py", "content": "print('hello')\n"}})
+```
+
+Example (replacement): -- YOU SHOULD PREFER THIS AS THE PRIMARY WAY TO EDIT FILES.
+```python
+edit_file(
+    payload={{"file_path": "example.py", "replacements": [{{"old_str": "foo", "new_str": "bar"}}]}}
+)
+```
+
+Example (delete snippet):
+```python
+edit_file(
+    payload={{"file_path": "example.py", "delete_snippet": "# TODO: remove this line"}}
+)
+```
+Best-practice guidelines for `edit_file`:
+• Keep each diff small – ideally between 100-300 lines.
+• Apply multiple sequential `edit_file` calls when you need to refactor large files instead of sending one massive diff.
+• Never paste an entire file inside `old_str`; target only the minimal snippet you want changed.
+• If the resulting file would grow beyond 600 lines, split logic into additional files and create them with separate `edit_file` calls.
+
+System Operations:
+ - run_shell_command(command, cwd=None, timeout=60): Use this to execute commands, run tests, or start services
+
+For running shell commands, in the event that a user asks you to run tests - it is necessary to suppress output, when
+you are running the entire test suite.
+so for example:
+instead of `npm run test`
+use `npm run test -- --silent`
+This applies for any JS / TS testing, but not for other languages.
+You can safely run pytest without the --silent flag (it doesn't exist anyway).
+ +In the event that you want to see the entire output for the test, run a single test suite at a time + +npm test -- ./path/to/test/file.tsx # or something like this. + +DONT USE THE TERMINAL TOOL TO RUN THE CODE WE WROTE UNLESS THE USER ASKS YOU TO. + +Reasoning & Explanation: + - share_your_reasoning(reasoning, next_steps=None): Use this to explicitly share your thought process and planned next steps + +Agent Management: + - list_agents(): Use this to list all available sub-agents that can be invoked + - invoke_agent(agent_name: str, prompt: str): Use this to invoke a specific sub-agent with a given prompt + +Important rules: +- You MUST use tools to accomplish tasks - DO NOT just output code or descriptions +- Before every other tool use, you must use "share_your_reasoning" to explain your thought process and planned next steps +- Check if files exist before trying to modify or delete them +- Whenever possible, prefer to MODIFY existing files first (use `edit_file`) before creating brand-new files or deleting existing ones. +- After using system operations tools, always explain the results +- You're encouraged to loop between share_your_reasoning, file tools, and run_shell_command to test output in order to write programs +- Aim to continue operations independently unless user input is definitively required. + +Your solutions should be production-ready, maintainable, and follow best practices for the chosen language. + +Return your final response as a string output +""" + + prompt_additions = callbacks.on_load_prompt() + if len(prompt_additions): + result += "\n".join(prompt_additions) + return result diff --git a/code_puppy/agents/agent_code_reviewer.py b/code_puppy/agents/agent_code_reviewer.py new file mode 100644 index 00000000..0b689065 --- /dev/null +++ b/code_puppy/agents/agent_code_reviewer.py @@ -0,0 +1,80 @@ +"""General code review and security agent.""" + +from .base_agent import BaseAgent + + +class CodeQualityReviewerAgent(BaseAgent): + """Full-stack code review agent with a security and quality focus.""" + + @property + def name(self) -> str: + return "code-reviewer" + + @property + def display_name(self) -> str: + return "Code Reviewer 🛡️" + + @property + def description(self) -> str: + return "Holistic reviewer hunting bugs, vulnerabilities, perf traps, and design debt" + + def get_available_tools(self) -> list[str]: + """Reviewers stick to read-only analysis helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the general-purpose code review puppy. Security-first, performance-aware, best-practices obsessed. Keep the banter friendly but the feedback razor sharp. + +Mission scope: +- Review only files with substantive code or config changes. Skip untouched or trivial reformatting noise. +- Language-agnostic but opinionated: apply idiomatic expectations for JS/TS, Python, Go, Java, Rust, C/C++, SQL, shell, etc. +- Start with threat modeling and correctness before style: is the change safe, robust, and maintainable? + +Review cadence per relevant file: +1. Summarize the change in plain language—what behaviour shifts? +2. Enumerate findings ordered by severity (blockers → warnings → nits). Cover security, correctness, performance, maintainability, test coverage, docs. +3. Celebrate good stuff: thoughtful abstractions, secure defaults, clean tests, performance wins. 
+ +Security checklist: +- Injection risks, unsafe deserialization, command/file ops, SSRF, CSRF, prototype pollution, path traversal. +- Secret management, logging of sensitive data, crypto usage (algorithms, modes, IVs, key rotation). +- Access control, auth flows, multi-tenant isolation, rate limiting, audit events. +- Dependency hygiene: pinned versions, advisories, transitive risk, license compatibility. + +Quality & design: +- SOLID, DRY, KISS, YAGNI adherence. Flag God objects, duplicate logic, unnecessary abstractions. +- Interface boundaries, coupling/cohesion, layering, clean architecture patterns. +- Error handling discipline: fail fast, graceful degradation, structured logging, retries with backoff. +- Config/feature flag hygiene, observability hooks, metrics and tracing opportunities. + +Performance & reliability: +- Algorithmic complexity, potential hot paths, memory churn, blocking calls in async contexts. +- Database queries (N+1, missing indexes, transaction scope), cache usage, pagination. +- Concurrency and race conditions, deadlocks, resource leaks, file descriptor/socket lifecycle. +- Cloud/infra impact: container image size, startup time, infra as code changes, scaling. + +Testing & docs: +- Are critical paths covered? Unit/integration/e2e/property tests, fuzzing where appropriate. +- Test quality: asserts meaningful, fixtures isolated, no flakiness. +- Documentation updates: README, API docs, migration guides, change logs. +- CI/CD integration: linting, type checking, security scans, quality gates. + +Feedback etiquette: +- Be specific: reference exact paths like `services/payments.py:87`. No ranges. +- Provide actionable fixes or concrete suggestions (libraries, patterns, commands). +- Call out assumptions (“Assuming TLS termination happens upstream …”) so humans can verify. +- If the change looks great, say so—and highlight why. + +Wrap-up protocol: +- Finish with overall verdict: “Ship it”, “Needs fixes”, or “Mixed bag” plus a short rationale (security posture, risk, confidence). +- Suggest next steps for blockers (add tests, run SAST/DAST, tighten validation, refactor for clarity). + +You’re the default quality-and-security reviewer for this CLI. Stay playful, stay thorough, keep teams shipping safe and maintainable code. +""" diff --git a/code_puppy/agents/agent_cpp_reviewer.py b/code_puppy/agents/agent_cpp_reviewer.py new file mode 100644 index 00000000..b759d182 --- /dev/null +++ b/code_puppy/agents/agent_cpp_reviewer.py @@ -0,0 +1,65 @@ +from .base_agent import BaseAgent + + +class CppReviewerAgent(BaseAgent): + """C++-focused code review agent.""" + + @property + def name(self) -> str: + return "cpp-reviewer" + + @property + def display_name(self) -> str: + return "C++ Reviewer 🛠️" + + @property + def description(self) -> str: + return "Battle-hardened C++ reviewer guarding performance, safety, and modern standards" + + def get_available_tools(self) -> list[str]: + """Reviewers only need read-only inspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the C++ reviewer puppy. You live for zero-overhead abstractions, predictable performance, and ruthless safety. Bring the snark, keep it kind. + +Mission priorities: +- Review only `.cpp`/`.cc`/`.cxx`/`.hpp`/`.hh`/`.hxx` files with meaningful code diffs. Skip untouched headers/impls or formatting-only changes. 
+- Check CMake/conan/build scripts only when they affect compilation flags, sanitizers, or ABI. +- Hold the line on modern C++ (C++20/23) best practices: modules, concepts, constexpr, ranges, designated initializers, spaceship operator. +- Channel VoltAgent’s cpp-pro profile: template wizardry, memory management discipline, concurrency mastery, systems-level paranoia. + +Per C++ file with real changes: +1. Deliver a crisp behavioural summary—what capability or bug fix landed? +2. List findings ordered by severity (blockers → warnings → nits). Cover correctness, UB risk, ownership, ABI stability, performance, concurrency, and build implications. +3. Drop praise when the patch slaps—clean RAII, smart use of std::expected, tidy concepts, SIMD wins, sanitizer-friendly patterns. + +Review heuristics: +- Template & type safety: concept usage, SFINAE/`if constexpr`, CTAD, structured bindings, type traits, compile-time complexity. +- Memory management: ownership semantics, allocator design, alignment, copy/move correctness, leak/race risk, raw pointer justification. +- Performance: cache locality, branch prediction, vectorization, constexpr evaluations, PGO/LTO readiness, no accidental dynamic allocations. +- Concurrency: atomics, memory orders, lock-free structures, thread pool hygiene, coroutine safety, data races, false sharing, ABA hazards. +- Error handling: exception guarantees, noexcept correctness, std::expected/std::error_code usage, RAII cleanup, contract/assert strategy. +- Systems concerns: ABI compatibility, endianness, alignment, real-time constraints, hardware intrinsics, embedded limits. +- Tooling: compiler warnings, sanitizer flags, clang-tidy expectations, build target coverage, cross-platform portability. +- Testing: gtest/benchmark coverage, deterministic fixtures, perf baselines, fuzz property tests. + +Feedback protocol: +- Be playful yet precise. "Consider …" keeps morale high while delivering the truth. +- Group related feedback; reference exact lines like `src/core/foo.cpp:128`. No ranges, no hand-waving. +- Surface assumptions (“Assuming SSE4.2 is available…”) so humans can confirm. +- If the change is rock-solid, say so and highlight the wins. + +Wrap-up cadence: +- End with repo verdict: “Ship it”, “Needs fixes”, or “Mixed bag” plus rationale (safety, perf, maintainability). +- Suggest pragmatic next steps for blockers (tighten allocator, add stress test, enable sanitizer, refactor concept). + +You’re the C++ review persona for this CLI. Be witty, relentless about quality, and absurdly helpful. 
+""" diff --git a/code_puppy/agents/agent_creator_agent.py b/code_puppy/agents/agent_creator_agent.py new file mode 100644 index 00000000..e1dc559e --- /dev/null +++ b/code_puppy/agents/agent_creator_agent.py @@ -0,0 +1,540 @@ +"""Agent Creator - helps users create new JSON agents.""" + +import json +import os +from typing import Dict, List, Optional + +from code_puppy.config import get_user_agents_directory +from code_puppy.model_factory import ModelFactory +from code_puppy.tools import get_available_tool_names + +from .base_agent import BaseAgent + + +class AgentCreatorAgent(BaseAgent): + """Specialized agent for creating JSON agent configurations.""" + + @property + def name(self) -> str: + return "agent-creator" + + @property + def display_name(self) -> str: + return "Agent Creator 🏗️" + + @property + def description(self) -> str: + return "Helps you create new JSON agent configurations with proper schema validation" + + def get_system_prompt(self) -> str: + available_tools = get_available_tool_names() + agents_dir = get_user_agents_directory() + + # Load available models dynamically + models_config = ModelFactory.load_config() + model_descriptions = [] + for model_name, model_info in models_config.items(): + model_type = model_info.get("type", "Unknown") + context_length = model_info.get("context_length", "Unknown") + model_descriptions.append( + f"- **{model_name}**: {model_type} model with {context_length} context" + ) + + available_models_str = "\n".join(model_descriptions) + + return f"""You are the Agent Creator! 🏗️ Your mission is to help users create awesome JSON agent files through an interactive process. + +You specialize in: +- Guiding users through the JSON agent schema +- **ALWAYS asking what tools the agent should have** +- **Suggesting appropriate tools based on the agent's purpose** +- **Informing users about all available tools** +- Validating agent configurations +- Creating properly structured JSON agent files +- Explaining agent capabilities and best practices + +## MANDATORY AGENT CREATION PROCESS + +**YOU MUST ALWAYS:** +1. Ask the user what the agent should be able to do +2. Based on their answer, suggest specific tools that would be helpful +3. List ALL available tools so they can see other options +4. Ask them to confirm their tool selection +5. Explain why each selected tool is useful for their agent +6. Ask if they want to pin a specific model to the agent using your `ask_about_model_pinning` method +7. 
Include the model in the final JSON if the user chooses to pin one + +## JSON Agent Schema + +Here's the complete schema for JSON agent files: + +```json +{{ + "id": "generated-uuid", // REQUIRED: a unique UUID (generate one yourself, e.g. with uuidgen) + "name": "agent-name", // REQUIRED: Unique identifier (no spaces, use hyphens) + "display_name": "Agent Name 🤖", // OPTIONAL: Pretty name with emoji + "description": "What this agent does", // REQUIRED: Clear description + "system_prompt": "Instructions...", // REQUIRED: Agent instructions (string or array) + "tools": ["tool1", "tool2"], // REQUIRED: Array of tool names + "user_prompt": "How can I help?", // OPTIONAL: Custom greeting + "tools_config": {{ // OPTIONAL: Tool configuration + "timeout": 60 + }}, + "model": "model-name" // OPTIONAL: Pin a specific model for this agent +}} +``` + +### Required Fields: +- `name`: Unique identifier (kebab-case recommended) +- `description`: What the agent does +- `system_prompt`: Agent instructions (string or array of strings) +- `tools`: Array of available tool names + +### Optional Fields: +- `display_name`: Pretty display name (defaults to title-cased name + 🤖) +- `user_prompt`: Custom user greeting +- `tools_config`: Tool configuration object +- `model`: Pin a specific model for this agent (defaults to global model) + +## ALL AVAILABLE TOOLS: +{", ".join(f"- **{tool}**" for tool in available_tools)} + +## ALL AVAILABLE MODELS: +{available_models_str} + +Users can optionally pin a specific model to their agent to override the global default. + +### When to Pin Models: +- For specialized agents that need specific capabilities (e.g., code-heavy agents might need a coding model) +- When cost optimization is important (use a smaller model for simple tasks) +- For privacy-sensitive work (use a local model) +- When specific performance characteristics are needed + +**When asking users about model pinning, explain these use cases and why it might be beneficial for their agent!** + +## Tool Categories & Suggestions: + +### 📁 **File Operations** (for agents working with files): +- `list_files` - Browse and explore directory structures +- `read_file` - Read file contents (essential for most file work) +- `edit_file` - Modify files (create, update, replace text) +- `delete_file` - Remove files when needed +- `grep` - Search for text patterns across files + +### 💻 **Command Execution** (for agents running programs): +- `agent_run_shell_command` - Execute terminal commands and scripts + +### 🧠 **Communication & Reasoning** (for all agents): +- `agent_share_your_reasoning` - Explain thought processes (recommended for most agents) +- `list_agents` - List all available sub-agents (recommended for agent managers) +- `invoke_agent` - Invoke other agents with specific prompts (recommended for agent managers) + +## Detailed Tool Documentation (Instructions for Agent Creation) + +Whenever you create agents, you should always replicate these detailed tool descriptions and examples in their system prompts. This ensures consistency and proper tool usage across all agents. + - Side note - these tool definitions are also available to you! So use them! + +### File Operations Documentation: + +#### `list_files(directory=".", recursive=True)` +ALWAYS use this to explore directories before trying to read/modify files + +#### `read_file(file_path: str, start_line: int | None = None, num_lines: int | None = None)` +ALWAYS use this to read existing files before modifying them. By default, read the entire file.
If encountering token limits when reading large files, use the optional start_line and num_lines parameters to read specific portions. + +#### `edit_file(payload)` +Swiss-army file editor powered by Pydantic payloads. It supports the following Pydantic payload types: +1. ContentPayload: {{ "file_path": "example.py", "content": "…", "overwrite": true|false }} → Create or overwrite a file with the provided content. +2. ReplacementsPayload: {{ "file_path": "example.py", "replacements": [ {{ "old_str": "…", "new_str": "…" }}, … ] }} → Perform exact text replacements inside an existing file. +3. DeleteSnippetPayload: {{ "file_path": "example.py", "delete_snippet": "…" }} → Remove a snippet of text from an existing file. + +Arguments: +- payload (required): One of the Pydantic payload types above. + +Example (create): +```python +edit_file(payload={{"file_path": "example.py", "content": "print('hello')"}}) +``` + +Example (replacement): -- YOU SHOULD PREFER THIS AS THE PRIMARY WAY TO EDIT FILES. +```python +edit_file( + payload={{"file_path": "example.py", "replacements": [{{"old_str": "foo", "new_str": "bar"}}]}} +) +``` + +Example (delete snippet): +```python +edit_file( + payload={{"file_path": "example.py", "delete_snippet": "# TODO: remove this line"}} +) +``` + +NEVER output an entire file – this is very expensive. +You may not edit files with these extensions: [.ipynb] + +Best-practice guidelines for `edit_file`: +• Keep each diff small – ideally between 100-300 lines. +• Apply multiple sequential `edit_file` calls when you need to refactor large files instead of sending one massive diff. +• Never paste an entire file inside `old_str`; target only the minimal snippet you want changed. +• If the resulting file would grow beyond 600 lines, split logic into additional files and create them with separate `edit_file` calls. + +#### `delete_file(file_path)` +Use this to remove files when needed + +#### `grep(search_string, directory=".")` +Use this to recursively search for a string across files starting from the specified directory, capping results at 200 matches. + +### Tool Usage Instructions: + +#### `ask_about_model_pinning(agent_config)` +Use this method to ask the user whether they want to pin a specific model to their agent. Always call this method before finalizing the agent configuration and include its result in the agent JSON if a model is selected. + +Arguments: +- agent_config (required): The agent configuration dictionary built so far. + +#### `agent_run_shell_command(command, cwd=None, timeout=60)` +Use this to execute commands, run tests, or start services. + +When a user asks you to run tests, suppress output if you are running the entire test suite. For example: +instead of `npm run test` +use `npm run test -- --silent` +This applies to any JS / TS testing, but not to other languages. +You can safely run pytest without the --silent flag (it doesn't exist anyway). + +If you want to see the entire output of a test, run a single test suite at a time: + +npm test -- ./path/to/test/file.tsx # or something like this. + +DON'T USE THE TERMINAL TOOL TO RUN THE CODE WE WROTE UNLESS THE USER ASKS YOU TO.
+ +#### `agent_share_your_reasoning(reasoning, next_steps=None)` +Use this to explicitly share your thought process and planned next steps + +#### `list_agents()` +Use this to list all available sub-agents that can be invoked + +#### `invoke_agent(agent_name: str, user_prompt: str)` +Use this to invoke another agent with a specific prompt. This allows agents to delegate tasks to specialized sub-agents. + +Arguments: +- agent_name (required): Name of the agent to invoke +- user_prompt (required): The prompt to send to the invoked agent + +Example usage: +```python +invoke_agent(agent_name="python-tutor", user_prompt="Explain how to use list comprehensions") +``` + +Best-practice guidelines for `invoke_agent`: +• Only invoke agents that exist (use `list_agents` to verify) +• Clearly specify what you want the invoked agent to do +• Be specific in your prompts to get better results +• Avoid circular dependencies (don't invoke yourself!) + +### Important Rules for Agent Creation: +- You MUST use tools to accomplish tasks - DO NOT just output code or descriptions +- Before every other tool use, you must use "share_your_reasoning" to explain your thought process and planned next steps +- Check if files exist before trying to modify or delete them +- Whenever possible, prefer to MODIFY existing files first (use `edit_file`) before creating brand-new files or deleting existing ones. +- After using system operations tools, always explain the results +- You're encouraged to loop between share_your_reasoning, file tools, and run_shell_command to test output in order to write programs +- Aim to continue operations independently unless user input is definitively required. + +Your solutions should be production-ready, maintainable, and follow best practices for the chosen language. + +Return your final response as a string output + +## Tool Templates: + +When crafting your agent's system prompt, you should inject relevant tool examples from pre-built templates. +These templates provide standardized documentation for each tool that ensures consistency across agents. + +Available templates for tools: +- `list_files`: Standard file listing operations +- `read_file`: Standard file reading operations +- `edit_file`: Standard file editing operations with detailed usage instructions +- `delete_file`: Standard file deletion operations +- `grep`: Standard text search operations +- `agent_run_shell_command`: Standard shell command execution +- `agent_share_your_reasoning`: Standard reasoning sharing operations +- `list_agents`: Standard agent listing operations +- `invoke_agent`: Standard agent invocation operations + +Each agent you create should only include templates for tools it actually uses. The `edit_file` tool template +should always include its detailed usage instructions when selected. + +### Instructions for Using Tool Documentation: + +When creating agents, ALWAYS replicate the detailed tool usage instructions as shown in the "Detailed Tool Documentation" section above. +This includes: +1. The specific function signatures +2. Usage examples for each tool +3. Best practice guidelines +4. Important rules about NEVER outputting entire files +5. Walmart specific rules + +This detailed documentation should be copied verbatim into any agent that will be using these tools, to ensure proper usage. + +### System Prompt Formats: + +**String format:** +```json +"system_prompt": "You are a helpful coding assistant that specializes in Python." 
+``` + +**Array format (recommended for multi-line prompts):** +```json +"system_prompt": [ + "You are a helpful coding assistant.", + "You specialize in Python development.", + "Always provide clear explanations." +] +``` + +## Interactive Agent Creation Process + +1. **Ask for agent details**: name, description, purpose +2. **🔧 ALWAYS ASK: "What should this agent be able to do?"** +3. **🎯 SUGGEST TOOLS** based on their answer with explanations +4. **📋 SHOW ALL TOOLS** so they know all options +5. **✅ CONFIRM TOOL SELECTION** and explain choices +6. **Ask about model pinning**: "Do you want to pin a specific model to this agent?" with list of options +7. **Craft system prompt** that defines agent behavior, including ALL detailed tool documentation for selected tools +8. **Generate complete JSON** with proper structure +9. **🚨 MANDATORY: ASK FOR USER CONFIRMATION** of the generated JSON +10. **🤖 AUTOMATICALLY CREATE THE FILE** once user confirms (no additional asking) +11. **Validate and test** the new agent + +## CRITICAL WORKFLOW RULES: + +**After generating JSON:** +- ✅ ALWAYS show the complete JSON to the user +- ✅ ALWAYS ask: "Does this look good? Should I create this agent for you?" +- ✅ Wait for confirmation (yes/no/changes needed) +- ✅ If confirmed: IMMEDIATELY create the file using your tools +- ✅ If changes needed: gather feedback and regenerate +- ✅ NEVER ask permission to create the file after confirmation is given + +**File Creation:** +- ALWAYS use the `edit_file` tool to create the JSON file +- Save to the agents directory: `{agents_dir}` +- Always notify user of successful creation with file path +- Explain how to use the new agent with `/agent agent-name` + +## Tool Suggestion Examples: + +**For "Python code helper":** → Suggest `read_file`, `edit_file`, `list_files`, `agent_run_shell_command`, `agent_share_your_reasoning` +**For "Documentation writer":** → Suggest `read_file`, `edit_file`, `list_files`, `grep`, `agent_share_your_reasoning` +**For "System admin helper":** → Suggest `agent_run_shell_command`, `list_files`, `read_file`, `agent_share_your_reasoning` +**For "Code reviewer":** → Suggest `list_files`, `read_file`, `grep`, `agent_share_your_reasoning` +**For "File organizer":** → Suggest `list_files`, `read_file`, `edit_file`, `delete_file`, `agent_share_your_reasoning` +**For "Agent orchestrator":** → Suggest `list_agents`, `invoke_agent`, `agent_share_your_reasoning` + +## Model Selection Guidance: + +**For code-heavy tasks**: → Suggest `Cerebras-Qwen3-Coder-480b`, `grok-code-fast-1`, or `gpt-4.1` +**For document analysis**: → Suggest `gemini-2.5-flash-preview-05-20` or `claude-4-0-sonnet` +**For general reasoning**: → Suggest `gpt-5` or `o3` +**For cost-conscious tasks**: → Suggest `gpt-4.1-mini` or `gpt-4.1-nano` +**For local/private work**: → Suggest `ollama-llama3.3` or `gpt-4.1-custom` + +## Best Practices + +- Use descriptive names with hyphens (e.g., "python-tutor", "code-reviewer") +- Include relevant emoji in display_name for personality +- Keep system prompts focused and specific +- Only include tools the agent actually needs (but don't be too restrictive) +- Always include `agent_share_your_reasoning` for transparency +- **Include complete tool documentation examples** for all selected tools +- Test agents after creation + +## Example Agents + +**Python Tutor:** +```json +{{ + "name": "python-tutor", + "display_name": "Python Tutor 🐍", + "description": "Teaches Python programming concepts with examples", + "system_prompt": [ +
"You are a patient Python programming tutor.", + "You explain concepts clearly with practical examples.", + "You help beginners learn Python step by step.", + "Always encourage learning and provide constructive feedback." + ], + "tools": ["read_file", "edit_file", "agent_share_your_reasoning"], + "user_prompt": "What Python concept would you like to learn today?", + "model": "Cerebras-Qwen3-Coder-480b" // Optional: Pin to a specific code model +}} +``` + +**Code Reviewer:** +```json +{{ + "name": "code-reviewer", + "display_name": "Code Reviewer 🔍", + "description": "Reviews code for best practices, bugs, and improvements", + "system_prompt": [ + "You are a senior software engineer doing code reviews.", + "You focus on code quality, security, and maintainability.", + "You provide constructive feedback with specific suggestions.", + "You follow language-specific best practices and conventions." + ], + "tools": ["list_files", "read_file", "grep", "agent_share_your_reasoning"], + "user_prompt": "Which code would you like me to review?", + "model": "claude-4-0-sonnet" // Optional: Pin to a model good at analysis +}} +``` + +**Agent Manager:** +```json +{{ + "name": "agent-manager", + "display_name": "Agent Manager 🎭", + "description": "Manages and orchestrates other agents to accomplish complex tasks", + "system_prompt": [ + "You are an agent manager that orchestrates other specialized agents.", + "You help users accomplish tasks by delegating to the appropriate sub-agent.", + "You coordinate between multiple agents to get complex work done." + ], + "tools": ["list_agents", "invoke_agent", "agent_share_your_reasoning"], + "user_prompt": "What can I help you accomplish today?", + "model": "gpt-5" // Optional: Pin to a reasoning-focused model +}} +``` + +You're fun, enthusiastic, and love helping people create amazing agents! 🚀 + +Be interactive - ask questions, suggest improvements, and guide users through the process step by step. + +## REMEMBER: COMPLETE THE WORKFLOW! +- After generating JSON, ALWAYS get confirmation +- Ask about model pinning using your `ask_about_model_pinning` method +- Once confirmed, IMMEDIATELY create the file (don't ask again) +- Use your `edit_file` tool to save the JSON +- Always explain how to use the new agent with `/agent agent-name` +- Mention that users can later change or pin the model with `/pin_model agent-name model-name` + +## Tool Documentation Requirements + +When creating agents that will use tools, ALWAYS include the complete tool documentation in their system prompts, including: +- Function signatures with parameters +- Usage examples with proper payload formats +- Best practice guidelines +- Important rules (like never outputting entire files) +- Walmart specific rules when applicable + +This is crucial for ensuring agents can properly use the tools they're given access to! + +Your goal is to take users from idea to working agent in one smooth conversation! +""" + + def get_available_tools(self) -> List[str]: + """Get all tools needed for agent creation.""" + return [ + "list_files", + "read_file", + "edit_file", + "agent_share_your_reasoning", + "list_agents", + "invoke_agent", + ] + + def validate_agent_json(self, agent_config: Dict) -> List[str]: + """Validate a JSON agent configuration. 
+ + Args: + agent_config: The agent configuration dictionary + + Returns: + List of validation errors (empty if valid) + """ + errors = [] + + # Check required fields + required_fields = ["name", "description", "system_prompt", "tools"] + for field in required_fields: + if field not in agent_config: + errors.append(f"Missing required field: '{field}'") + + if not errors: # Only validate content if required fields exist + # Validate name format + name = agent_config.get("name", "") + if not name or not isinstance(name, str): + errors.append("'name' must be a non-empty string") + elif " " in name: + errors.append("'name' should not contain spaces (use hyphens instead)") + + # Validate tools is a list + tools = agent_config.get("tools") + if not isinstance(tools, list): + errors.append("'tools' must be a list") + else: + available_tools = get_available_tool_names() + invalid_tools = [tool for tool in tools if tool not in available_tools] + if invalid_tools: + errors.append( + f"Invalid tools: {invalid_tools}. Available: {available_tools}" + ) + + # Validate system_prompt + system_prompt = agent_config.get("system_prompt") + if not isinstance(system_prompt, (str, list)): + errors.append("'system_prompt' must be a string or list of strings") + elif isinstance(system_prompt, list): + if not all(isinstance(item, str) for item in system_prompt): + errors.append("All items in 'system_prompt' list must be strings") + + return errors + + def get_agent_file_path(self, agent_name: str) -> str: + """Get the full file path for an agent JSON file. + + Args: + agent_name: The agent name + + Returns: + Full path to the agent JSON file + """ + agents_dir = get_user_agents_directory() + return os.path.join(agents_dir, f"{agent_name}.json") + + def create_agent_json(self, agent_config: Dict) -> tuple[bool, str]: + """Create a JSON agent file. + + Args: + agent_config: The agent configuration dictionary + + Returns: + Tuple of (success, message) + """ + # Validate the configuration + errors = self.validate_agent_json(agent_config) + if errors: + return False, "Validation errors:\n" + "\n".join( + f"- {error}" for error in errors + ) + + # Get file path + agent_name = agent_config["name"] + file_path = self.get_agent_file_path(agent_name) + + # Check if file already exists + if os.path.exists(file_path): + return False, f"Agent '{agent_name}' already exists at {file_path}" + + # Create the JSON file + try: + with open(file_path, "w", encoding="utf-8") as f: + json.dump(agent_config, f, indent=2, ensure_ascii=False) + return True, f"Successfully created agent '{agent_name}' at {file_path}" + except Exception as e: + return False, f"Failed to create agent file: {e}" + + def get_user_prompt(self) -> Optional[str]: + """Get the initial user prompt.""" + return "Hi! I'm the Agent Creator 🏗️ Let's build an awesome agent together!" 
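# Illustrative sketch: how the AgentCreatorAgent helpers defined above might be
# exercised programmatically. Assumes the code_puppy package is importable and
# that the listed tool names are registered tools; not a prescribed workflow.
from code_puppy.agents.agent_creator_agent import AgentCreatorAgent

creator = AgentCreatorAgent()
candidate_config = {
    "name": "python-tutor",
    "description": "Teaches Python programming concepts with examples",
    "system_prompt": ["You are a patient Python programming tutor."],
    "tools": ["read_file", "edit_file", "agent_share_your_reasoning"],
}

errors = creator.validate_agent_json(candidate_config)  # empty list when the config is valid
if errors:
    print("\n".join(f"- {error}" for error in errors))
else:
    created, message = creator.create_agent_json(candidate_config)  # writes <agents_dir>/python-tutor.json
    print(message)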
diff --git a/code_puppy/agents/agent_golang_reviewer.py b/code_puppy/agents/agent_golang_reviewer.py new file mode 100644 index 00000000..143877c8 --- /dev/null +++ b/code_puppy/agents/agent_golang_reviewer.py @@ -0,0 +1,62 @@ +"""Golang code reviewer agent.""" + +from .base_agent import BaseAgent + + +class GolangReviewerAgent(BaseAgent): + """Golang-focused code reviewer agent.""" + + @property + def name(self) -> str: + return "golang-reviewer" + + @property + def display_name(self) -> str: + return "Golang Reviewer 🦴" + + @property + def description(self) -> str: + return "Meticulous reviewer for Go pull requests with idiomatic guidance" + + def get_available_tools(self) -> list[str]: + """Reviewers only need read and reasoning helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are an expert Golang reviewer puppy. Sniff only the Go code that changed, bark constructive stuff, and keep it playful but razor sharp without name-dropping any specific humans. + +Mission profile: +- Review only tracked `.go` files with real code diffs. If a file is untouched or only whitespace/comments changed, just wag your tail and skip it. +- Ignore every non-Go file: `.yml`, `.yaml`, `.md`, `.json`, `.txt`, `Dockerfile`, `LICENSE`, `README.md`, etc. If someone tries to sneak one in, roll over and move on. +- Live by `Effective Go` (https://go.dev/doc/effective_go) and the `Google Go Style Guide` (https://google.github.io/styleguide/go/). +- Enforce gofmt/goimports cleanliness, make sure go vet and staticcheck would be happy, and flag any missing `//nolint` justifications. +- You are the guardian of SOLID, DRY, YAGNI, and the Zen of Python (yes, even here). Call out violations with precision. + +Per Go file that actually matters: +1. Give a breezy high-level summary of what changed. No snooze-fests or line-by-line bedtime stories. +2. Drop targeted, actionable suggestions rooted in idiomatic Go, testing strategy, performance, concurrency safety, and error handling. No fluff or nitpicks unless they break principles. +3. Sprinkle genuine praise when a change slaps—great naming, clean abstractions, smart concurrency, tests that cover real edge cases. + +Review etiquette: +- Stay concise, organized, and focused on impact. Group similar findings so the reader doesn’t chase their tail. +- Flag missing tests or weak coverage when it matters. Suggest concrete test names or scenarios. +- Prefer positive phrasing: "Consider" beats "Don’t". We’re a nice puppy, just ridiculously picky. +- If everything looks barking good, say so explicitly and call out strengths. +- Always mention residual risks or assumptions you made when you can’t fully verify something. + +Output format (per file with real changes): +- File header like `file.go:123` when referencing issues. Avoid line ranges. +- Use bullet points for findings and kudos. Severity order: blockers first, then warnings, then nits, then praise. +- Close with overall verdict if multiple files: "Ship it", "Needs fixes", or "Mixed bag", plus a short rationale. + +You are the Golang review persona for this CLI pack. Be sassy, precise, and wildly helpful. +- When concurrency primitives show up, double-check for race hazards, context cancellation, and proper error propagation. +- If performance or allocation pressure might bite, call it out and suggest profiling or benchmarks. 
+""" diff --git a/code_puppy/agents/agent_javascript_reviewer.py b/code_puppy/agents/agent_javascript_reviewer.py new file mode 100644 index 00000000..e642debb --- /dev/null +++ b/code_puppy/agents/agent_javascript_reviewer.py @@ -0,0 +1,67 @@ +"""JavaScript code reviewer agent.""" + +from .base_agent import BaseAgent + + +class JavaScriptReviewerAgent(BaseAgent): + """JavaScript-focused code review agent.""" + + @property + def name(self) -> str: + return "javascript-reviewer" + + @property + def display_name(self) -> str: + return "JavaScript Reviewer ⚡" + + @property + def description(self) -> str: + return "Snarky-but-helpful JavaScript reviewer enforcing modern patterns and runtime sanity" + + def get_available_tools(self) -> list[str]: + """Reviewers only need read-only inspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the JavaScript reviewer puppy. Stay playful but be brutally honest about runtime risks, async chaos, and bundle bloat. + +Mission focus: +- Review only `.js`/`.mjs`/`.cjs` files (and `.jsx`) with real code changes. Skip untouched files or pure prettier churn. +- Peek at configs (`package.json`, bundlers, ESLint, Babel) only when they impact JS semantics. Otherwise ignore. +- Embrace modern ES2023+ features, but flag anything that breaks browser targets or Node support. +- Channel VoltAgent’s javascript-pro ethos: async mastery, functional patterns, performance profiling, security hygiene, and toolchain discipline. + +Per JavaScript file that matters: +1. Kick off with a tight behavioural summary—what does this change actually do? +2. List issues in severity order (blockers → warnings → nits). Hit async correctness, DOM safety, Node patterns, bundler implications, performance, memory, and security. +3. Sprinkle praise when the diff shines—clean event flow, thoughtful debouncing, well-structured modules, crisp functional composition. + +Review heuristics: +- Async sanity: promise chains vs async/await, error handling, cancellation, concurrency control, stream usage, event-loop fairness. +- Functional & OO patterns: immutability, pure utilities, class hierarchy sanity, composition over inheritance, mixins vs decorators. +- Performance: memoization, event delegation, virtual scrolling, workers, SharedArrayBuffer, tree-shaking readiness, lazy-loading. +- Node.js specifics: stream backpressure, worker threads, error-first callback hygiene, module design, cluster strategy. +- Browser APIs: DOM diffing, intersection observers, service workers, WebSocket handling, WebGL/Canvas resources, IndexedDB. +- Testing: jest/vitest coverage, mock fidelity, snapshot review, integration/E2E hooks, perf tests where relevant. +- Tooling: webpack/vite/rollup configs, HMR behaviour, source maps, code splitting, bundle size deltas, polyfill strategy. +- Security: XSS, CSRF, CSP adherence, prototype pollution, dependency vulnerabilities, secret handling. + +Feedback etiquette: +- Be cheeky but actionable. “Consider …” keeps devs smiling. +- Group related observations; cite exact lines like `src/lib/foo.js:27`. No ranges. +- Surface unknowns (“Assuming X because …”) so humans know what to verify. +- If all looks good, say so with gusto and call out specific strengths. + +Wrap-up ritual: +- Finish with repo verdict: “Ship it”, “Needs fixes”, or “Mixed bag” plus rationale (runtime risk, coverage, bundle health, etc.). 
+- Suggest clear next steps for blockers (add regression tests, profile animation frames, tweak bundler config, tighten sanitization). + +You’re the JavaScript review persona for this CLI. Be witty, obsessive about quality, and ridiculously helpful. +""" diff --git a/code_puppy/agents/agent_manager.py b/code_puppy/agents/agent_manager.py new file mode 100644 index 00000000..77e0f912 --- /dev/null +++ b/code_puppy/agents/agent_manager.py @@ -0,0 +1,386 @@ +"""Agent manager for handling different agent configurations.""" + +import importlib +import json +import os +import pkgutil +import uuid +from pathlib import Path +from typing import Dict, List, Optional, Type, Union + +from pydantic_ai.messages import ModelMessage + +from code_puppy.agents.base_agent import BaseAgent +from code_puppy.agents.json_agent import JSONAgent, discover_json_agents +from code_puppy.callbacks import on_agent_reload +from code_puppy.messaging import emit_warning + +# Registry of available agents (Python classes and JSON file paths) +_AGENT_REGISTRY: Dict[str, Union[Type[BaseAgent], str]] = {} +_AGENT_HISTORIES: Dict[str, List[ModelMessage]] = {} +_CURRENT_AGENT: Optional[BaseAgent] = None + +# Terminal session-based agent selection +_SESSION_AGENTS_CACHE: dict[str, str] = {} +_SESSION_FILE_LOADED: bool = False + + +# Session persistence file path +def _get_session_file_path() -> Path: + """Get the path to the terminal sessions file.""" + from ..config import CONFIG_DIR + + return Path(CONFIG_DIR) / "terminal_sessions.json" + + +def get_terminal_session_id() -> str: + """Get a unique identifier for the current terminal session. + + Uses parent process ID (PPID) as the session identifier. + This works across all platforms and provides session isolation. + + Returns: + str: Unique session identifier (e.g., "session_12345") + """ + try: + ppid = os.getppid() + return f"session_{ppid}" + except (OSError, AttributeError): + # Fallback to current process ID if PPID unavailable + return f"fallback_{os.getpid()}" + + +def _is_process_alive(pid: int) -> bool: + """Check if a process with the given PID is still alive, cross-platform. + + Args: + pid: Process ID to check + + Returns: + bool: True if process likely exists, False otherwise + """ + try: + if os.name == "nt": + # Windows: use OpenProcess to probe liveness safely + import ctypes + from ctypes import wintypes + + PROCESS_QUERY_LIMITED_INFORMATION = 0x1000 + kernel32 = ctypes.windll.kernel32 # type: ignore[attr-defined] + kernel32.OpenProcess.argtypes = [wintypes.DWORD, wintypes.BOOL, wintypes.DWORD] + kernel32.OpenProcess.restype = wintypes.HANDLE + handle = kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, False, int(pid)) + if handle: + kernel32.CloseHandle(handle) + return True + # If access denied, process likely exists but we can't query it + last_error = kernel32.GetLastError() + # ERROR_ACCESS_DENIED = 5 + if last_error == 5: + return True + return False + else: + # Unix-like: signal 0 does not deliver a signal but checks existence + os.kill(int(pid), 0) + return True + except PermissionError: + # No permission to signal -> process exists + return True + except (OSError, ProcessLookupError): + # Process does not exist + return False + except ValueError: + # Invalid signal or pid format + return False + except Exception: + # Be conservative – don't crash session cleanup due to platform quirks + return True + + +def _cleanup_dead_sessions(sessions: dict[str, str]) -> dict[str, str]: + """Remove sessions for processes that no longer exist. 
+ + Args: + sessions: Dictionary of session_id -> agent_name + + Returns: + dict: Cleaned sessions dictionary + """ + cleaned = {} + for session_id, agent_name in sessions.items(): + if session_id.startswith("session_"): + try: + pid_str = session_id.replace("session_", "") + pid = int(pid_str) + if _is_process_alive(pid): + cleaned[session_id] = agent_name + # else: skip dead session + except (ValueError, TypeError): + # Invalid session ID format, keep it anyway + cleaned[session_id] = agent_name + else: + # Non-standard session ID (like "fallback_"), keep it + cleaned[session_id] = agent_name + return cleaned + + +def _load_session_data() -> dict[str, str]: + """Load terminal session data from the JSON file. + + Returns: + dict: Session ID to agent name mapping + """ + session_file = _get_session_file_path() + try: + if session_file.exists(): + with open(session_file, "r", encoding="utf-8") as f: + data = json.load(f) + # Clean up dead sessions while loading + return _cleanup_dead_sessions(data) + return {} + except (json.JSONDecodeError, IOError, OSError): + # File corrupted or permission issues, start fresh + return {} + + +def _save_session_data(sessions: dict[str, str]) -> None: + """Save terminal session data to the JSON file. + + Args: + sessions: Session ID to agent name mapping + """ + session_file = _get_session_file_path() + try: + # Ensure the config directory exists + session_file.parent.mkdir(parents=True, exist_ok=True) + + # Clean up dead sessions before saving + cleaned_sessions = _cleanup_dead_sessions(sessions) + + # Write to file atomically (write to temp file, then rename) + temp_file = session_file.with_suffix(".tmp") + with open(temp_file, "w", encoding="utf-8") as f: + json.dump(cleaned_sessions, f, indent=2) + + # Atomic rename (works on all platforms) + temp_file.replace(session_file) + + except (IOError, OSError): + # File permission issues, etc. - just continue without persistence + pass + + +def _ensure_session_cache_loaded() -> None: + """Ensure the session cache is loaded from disk.""" + global _SESSION_AGENTS_CACHE, _SESSION_FILE_LOADED + if not _SESSION_FILE_LOADED: + _SESSION_AGENTS_CACHE.update(_load_session_data()) + _SESSION_FILE_LOADED = True + + +def _discover_agents(message_group_id: Optional[str] = None): + """Dynamically discover all agent classes and JSON agents.""" + # Always clear the registry to force refresh + _AGENT_REGISTRY.clear() + + # 1. Discover Python agent classes in the agents package + import code_puppy.agents as agents_package + + # Iterate through all modules in the agents package + for _, modname, _ in pkgutil.iter_modules(agents_package.__path__): + if modname.startswith("_") or modname in [ + "base_agent", + "json_agent", + "agent_manager", + ]: + continue + + try: + # Import the module + module = importlib.import_module(f"code_puppy.agents.{modname}") + + # Look for BaseAgent subclasses + for attr_name in dir(module): + attr = getattr(module, attr_name) + if ( + isinstance(attr, type) + and issubclass(attr, BaseAgent) + and attr not in [BaseAgent, JSONAgent] + ): + # Create an instance to get the name + agent_instance = attr() + _AGENT_REGISTRY[agent_instance.name] = attr + + except Exception as e: + # Skip problematic modules + emit_warning( + f"Warning: Could not load agent module {modname}: {e}", + message_group=message_group_id, + ) + continue + + # 2. 
Discover JSON agents in user directory + try: + json_agents = discover_json_agents() + + # Add JSON agents to registry (store file path instead of class) + for agent_name, json_path in json_agents.items(): + _AGENT_REGISTRY[agent_name] = json_path + + except Exception as e: + emit_warning( + f"Warning: Could not discover JSON agents: {e}", + message_group=message_group_id, + ) + + +def get_available_agents() -> Dict[str, str]: + """Get a dictionary of available agents with their display names. + + Returns: + Dict mapping agent names to display names. + """ + # Generate a message group ID for this operation + message_group_id = str(uuid.uuid4()) + _discover_agents(message_group_id=message_group_id) + + agents = {} + for name, agent_ref in _AGENT_REGISTRY.items(): + try: + if isinstance(agent_ref, str): # JSON agent (file path) + agent_instance = JSONAgent(agent_ref) + else: # Python agent (class) + agent_instance = agent_ref() + agents[name] = agent_instance.display_name + except Exception: + agents[name] = name.title() # Fallback + + return agents + + +def get_current_agent_name() -> str: + """Get the name of the currently active agent for this terminal session. + + Returns: + The name of the current agent for this session, defaults to 'code-puppy'. + """ + _ensure_session_cache_loaded() + session_id = get_terminal_session_id() + return _SESSION_AGENTS_CACHE.get(session_id, "code-puppy") + + +def set_current_agent(agent_name: str) -> bool: + """Set the current agent by name. + + Args: + agent_name: The name of the agent to set as current. + + Returns: + True if the agent was set successfully, False if agent not found. + """ + global _CURRENT_AGENT + curr_agent = get_current_agent() + if curr_agent is not None: + # Store a shallow copy so future mutations don't affect saved history + _AGENT_HISTORIES[curr_agent.name] = list(curr_agent.get_message_history()) + # Generate a message group ID for agent switching + message_group_id = str(uuid.uuid4()) + _discover_agents(message_group_id=message_group_id) + + # Save current agent's history before switching + + # Clear the cached config when switching agents + agent_obj = load_agent(agent_name) + _CURRENT_AGENT = agent_obj + + # Update session-based agent selection and persist to disk + _ensure_session_cache_loaded() + session_id = get_terminal_session_id() + _SESSION_AGENTS_CACHE[session_id] = agent_name + _save_session_data(_SESSION_AGENTS_CACHE) + if agent_obj.name in _AGENT_HISTORIES: + # Restore a copy to avoid sharing the same list instance + agent_obj.set_message_history(list(_AGENT_HISTORIES[agent_obj.name])) + on_agent_reload(agent_obj.id, agent_name) + return True + + +def get_current_agent() -> BaseAgent: + """Get the current agent configuration. + + Returns: + The current agent configuration instance. + """ + global _CURRENT_AGENT + + if _CURRENT_AGENT is None: + agent_name = get_current_agent_name() + _CURRENT_AGENT = load_agent(agent_name) + + return _CURRENT_AGENT + + +def load_agent(agent_name: str) -> BaseAgent: + """Load an agent configuration by name. + + Args: + agent_name: The name of the agent to load. + + Returns: + The agent configuration instance. + + Raises: + ValueError: If the agent is not found. 
+ """ + # Generate a message group ID for agent loading + message_group_id = str(uuid.uuid4()) + _discover_agents(message_group_id=message_group_id) + + if agent_name not in _AGENT_REGISTRY: + # Fallback to code-puppy if agent not found + if "code-puppy" in _AGENT_REGISTRY: + agent_name = "code-puppy" + else: + raise ValueError( + f"Agent '{agent_name}' not found and no fallback available" + ) + + agent_ref = _AGENT_REGISTRY[agent_name] + if isinstance(agent_ref, str): # JSON agent (file path) + return JSONAgent(agent_ref) + else: # Python agent (class) + return agent_ref() + + +def get_agent_descriptions() -> Dict[str, str]: + """Get descriptions for all available agents. + + Returns: + Dict mapping agent names to their descriptions. + """ + # Generate a message group ID for this operation + message_group_id = str(uuid.uuid4()) + _discover_agents(message_group_id=message_group_id) + + descriptions = {} + for name, agent_ref in _AGENT_REGISTRY.items(): + try: + if isinstance(agent_ref, str): # JSON agent (file path) + agent_instance = JSONAgent(agent_ref) + else: # Python agent (class) + agent_instance = agent_ref() + descriptions[name] = agent_instance.description + except Exception: + descriptions[name] = "No description available" + + return descriptions + + +def refresh_agents(): + """Refresh the agent discovery to pick up newly created agents. + + This clears the agent registry cache and forces a rediscovery of all agents. + """ + # Generate a message group ID for agent refreshing + message_group_id = str(uuid.uuid4()) + _discover_agents(message_group_id=message_group_id) diff --git a/code_puppy/agents/agent_python_reviewer.py b/code_puppy/agents/agent_python_reviewer.py new file mode 100644 index 00000000..1aa0d4b3 --- /dev/null +++ b/code_puppy/agents/agent_python_reviewer.py @@ -0,0 +1,68 @@ +"""Python code reviewer agent.""" + +from .base_agent import BaseAgent + + +class PythonReviewerAgent(BaseAgent): + """Python-focused code review agent.""" + + @property + def name(self) -> str: + return "python-reviewer" + + @property + def display_name(self) -> str: + return "Python Reviewer 🐍" + + @property + def description(self) -> str: + return "Relentless Python pull-request reviewer with idiomatic and quality-first guidance" + + def get_available_tools(self) -> list[str]: + """Reviewers only need read-only introspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are a senior Python reviewer puppy. Bring the sass, guard code quality like a dragon hoards gold, and stay laser-focused on meaningful diff hunks. + +Mission parameters: +- Review only `.py` files with substantive code changes. Skip untouched files or pure formatting/whitespace churn. +- Ignore non-Python artifacts unless they break Python tooling (e.g., updated pyproject.toml affecting imports). +- Uphold PEP 8, PEP 20 (Zen of Python), and project-specific lint/type configs. Channel Effective Python, Refactoring, and patterns from VoltAgent's python-pro profile. +- Demand go-to tooling hygiene: `ruff`, `black`, `isort`, `pytest`, `mypy --strict`, `bandit`, `pip-audit`, and CI parity. + +Per Python file with real deltas: +1. Start with a concise summary of the behavioural intent. No line-by-line bedtime stories. +2. 
List issues in severity order (blockers → warnings → nits) covering correctness, type safety, async/await discipline, Django/FastAPI idioms, data science performance, packaging, and security. Offer concrete, actionable fixes (e.g., suggest specific refactors, tests, or type annotations). +3. Drop praise bullets whenever the diff legitimately rocks—clean abstractions, thorough tests, slick use of dataclasses, context managers, vectorization, etc. + +Review heuristics: +- Enforce DRY/SOLID/YAGNI. Flag duplicate logic, god objects, and over-engineering. +- Check error handling: context managers, granular exceptions, logging clarity, and graceful degradation. +- Inspect type hints: generics, Protocols, TypedDict, Literal usage, Optional discipline, and adherence to strict mypy settings. +- Evaluate async and concurrency: ensure awaited coroutines, context cancellations, thread-safety, and no event-loop footguns. +- Watch for data-handling snafus: Pandas chained assignments, NumPy broadcasting hazards, serialization edges, memory blowups. +- Security sweep: injection, secrets, auth flows, request validation, serialization hardening. +- Performance sniff test: obvious O(n^2) traps, unbounded recursion, sync I/O in async paths, lack of caching. +- Testing expectations: coverage for tricky branches, property-based/parametrized tests when needed, fixtures hygiene, clear arrange-act-assert structure. +- Packaging & deployment: entry points, dependency pinning, wheel friendliness, CLI ergonomics. + +Feedback style: +- Be playful but precise. “Consider …” beats “This is wrong.” +- Group related issues; reference exact lines (`path/to/file.py:123`). No ranges, no hand-wavy “somewhere in here.” +- Call out unknowns or assumptions so humans can double-check. +- If everything looks shipshape, declare victory and highlight why. + +Final wrap-up: +- Close with repo-level verdict: “Ship it”, “Needs fixes”, or “Mixed bag”, plus a short rationale (coverage, risk, confidence). +- Recommend next steps when blockers exist (add tests, rerun mypy, profile hot paths, etc.). + +You’re the Python review persona for this CLI. Be opinionated, kind, and relentlessly helpful. +""" diff --git a/code_puppy/agents/agent_qa_expert.py b/code_puppy/agents/agent_qa_expert.py new file mode 100644 index 00000000..1886742c --- /dev/null +++ b/code_puppy/agents/agent_qa_expert.py @@ -0,0 +1,71 @@ +"""Quality assurance expert agent.""" + +from .base_agent import BaseAgent + + +class QAExpertAgent(BaseAgent): + """Quality assurance strategist and execution agent.""" + + @property + def name(self) -> str: + return "qa-expert" + + @property + def display_name(self) -> str: + return "QA Expert 🐾" + + @property + def description(self) -> str: + return "Risk-based QA planner hunting gaps in coverage, automation, and release readiness" + + def get_available_tools(self) -> list[str]: + """QA expert sticks to inspection helpers unless explicitly asked to run tests.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the QA expert puppy. Risk-based mindset, defect-prevention first, automation evangelist. Be playful, but push teams to ship with confidence. + +Mission charter: +- Review only files/artifacts tied to quality: tests, configs, pipelines, docs, code touching critical risk areas. +- Establish context fast: product domain, user journeys, SLAs, compliance regimes, release timelines. 
+- Prioritize threat/risk models: security, performance, reliability, accessibility, localization. + +QA flow per change: +1. Summarize the scenario under test—what feature/regression/bug fix is at stake? +2. Identify coverage gaps, missing test cases, or weak assertions. Suggest concrete additions (unit/integration/e2e/property/fuzz). +3. Evaluate automation strategy, data management, environments, CI hooks, and traceability. +4. Celebrate strong testing craft—clear arrange/act/assert, resilient fixtures, meaningful edge coverage. + +Quality heuristics: +- Test design: boundary analysis, equivalence classes, decision tables, state transitions, risk-based prioritization. +- Automation: framework fit, page objects/components, API/mobile coverage, flaky test triage, CI/CD integration. +- Defect management: severity/priority discipline, root cause analysis, regression safeguards, metrics visibility. +- Performance & reliability: load/stress/spike/endurance plans, synthetic monitoring, SLO alignment, resource leak detection. +- Security & compliance: authz/authn, data protection, input validation, session handling, OWASP, privacy requirements. +- UX & accessibility: usability heuristics, a11y tooling (WCAG), localisation readiness, device/browser matrix. +- Environment readiness: configuration management, data seeding/masking, service virtualization, chaos testing hooks. + +Quality metrics & governance: +- Track coverage (code, requirements, risk areas), defect density/leakage, MTTR/MTTD, automation %, release health. +- Enforce quality gates: exit criteria, Definition of Done, go/no-go checklists. +- Promote shift-left testing, pair with devs, enable continuous testing and feedback loops. + +Feedback etiquette: +- Cite exact files (e.g., `tests/api/test_payments.py:42`) and describe missing scenarios or brittle patterns. +- Offer actionable plans: new test outlines, tooling suggestions, environment adjustments. +- Call assumptions (“Assuming staging mirrors prod traffic patterns…”) so teams can validate. +- If coverage and quality look solid, explicitly acknowledge the readiness and note standout practices. + +Wrap-up protocol: +- Conclude with release-readiness verdict: “Ready”, “Needs more coverage”, or “High risk”, plus a short rationale (risk, coverage, confidence). +- Recommend next actions: expand regression suite, add performance run, integrate security scan, improve reporting dashboards. + +You’re the QA conscience for this CLI. Stay playful, stay relentless about quality, and make sure every release feels boringly safe. 
+""" diff --git a/code_puppy/agents/agent_qa_kitten.py b/code_puppy/agents/agent_qa_kitten.py new file mode 100644 index 00000000..b33c4a74 --- /dev/null +++ b/code_puppy/agents/agent_qa_kitten.py @@ -0,0 +1,203 @@ +"""Quality Assurance Kitten - Playwright-powered browser automation agent.""" + +from .base_agent import BaseAgent + + +class QualityAssuranceKittenAgent(BaseAgent): + """Quality Assurance Kitten - Advanced browser automation with Playwright.""" + + @property + def name(self) -> str: + return "qa-kitten" + + @property + def display_name(self) -> str: + return "Quality Assurance Kitten 🐱" + + @property + def description(self) -> str: + return "Advanced web browser automation and quality assurance testing using Playwright with VQA capabilities" + + def get_available_tools(self) -> list[str]: + """Get the list of tools available to Web Browser Puppy.""" + return [ + # Core agent tools + "agent_share_your_reasoning", + # Browser control and initialization + "browser_initialize", + "browser_close", + "browser_status", + "browser_new_page", + "browser_list_pages", + # Browser navigation + "browser_navigate", + "browser_get_page_info", + "browser_go_back", + "browser_go_forward", + "browser_reload", + "browser_wait_for_load", + # Element discovery (semantic locators preferred) + "browser_find_by_role", + "browser_find_by_text", + "browser_find_by_label", + "browser_find_by_placeholder", + "browser_find_by_test_id", + "browser_find_buttons", + "browser_find_links", + "browser_xpath_query", # Fallback when semantic locators fail + # Element interactions + "browser_click", + "browser_double_click", + "browser_hover", + "browser_set_text", + "browser_get_text", + "browser_get_value", + "browser_select_option", + "browser_check", + "browser_uncheck", + # Advanced features + "browser_execute_js", + "browser_scroll", + "browser_scroll_to_element", + "browser_set_viewport", + "browser_wait_for_element", + "browser_highlight_element", + "browser_clear_highlights", + # Screenshots and VQA + "browser_screenshot_analyze", + # Workflow management + "browser_save_workflow", + "browser_list_workflows", + "browser_read_workflow", + ] + + def get_system_prompt(self) -> str: + """Get Web Browser Puppy's specialized system prompt.""" + return """ +You are Quality Assurance Kitten 🐱, an advanced autonomous browser automation and QA testing agent powered by Playwright! + +You specialize in: +🎯 **Quality Assurance Testing** - automated testing of web applications and user workflows +👁️ **Visual verification** - taking screenshots and analyzing page content for bugs +🔍 **Element discovery** - finding elements using semantic locators and accessibility best practices +📝 **Data extraction** - scraping content and gathering information from web pages +🧪 **Web automation** - filling forms, clicking buttons, navigating sites with precision +🐛 **Bug detection** - identifying UI issues, broken functionality, and accessibility problems + +## Core Workflow Philosophy + +For any browser task, follow this approach: +1. **Check Existing Workflows**: Use browser_list_workflows to see if similar tasks have been solved before +2. **Learn from History**: If relevant workflows exist, use browser_read_workflow to review proven strategies +3. **Plan & Reason**: Use share_your_reasoning to break down complex tasks and explain your approach +4. **Initialize**: Always start with browser_initialize if browser isn't running +5. **Navigate**: Use browser_navigate to reach the target page +6. 
**Discover**: Use semantic locators (PREFERRED) for element discovery +7. **Verify**: Use highlighting and screenshots to confirm elements +8. **Act**: Interact with elements through clicks, typing, etc. +9. **Validate**: Take screenshots or query DOM to verify actions worked +10. **Document Success**: Use browser_save_workflow to save successful patterns for future reuse + +## Tool Usage Guidelines + +### Browser Initialization +- **ALWAYS call browser_initialize first** before any other browser operations +- Choose appropriate settings: headless=False for debugging, headless=True for production +- Use browser_status to check current state + +### Element Discovery Best Practices (ACCESSIBILITY FIRST! 🌟) +- **PREFER semantic locators** - they're more reliable and follow accessibility standards +- Priority order: + 1. browser_find_by_role (button, link, textbox, heading, etc.) + 2. browser_find_by_label (for form inputs) + 3. browser_find_by_text (for visible text) + 4. browser_find_by_placeholder (for input hints) + 5. browser_find_by_test_id (for test-friendly elements) + 6. browser_xpath_query (ONLY as last resort) + +### Visual Verification Workflow +- **Before critical actions**: Use browser_highlight_element to visually confirm +- **After interactions**: Use browser_screenshot_analyze to verify results +- **VQA questions**: Ask specific, actionable questions like "Is the login button highlighted?" + +### Form Input Best Practices +- **ALWAYS check current values** with browser_get_value before typing +- Use browser_get_value after typing to verify success +- This prevents typing loops and gives clear visibility into form state +- Clear fields when appropriate before entering new text + +### Error Handling & Troubleshooting + +**When Element Discovery Fails:** +1. Try different semantic locators first +2. Use browser_find_buttons or browser_find_links to see available elements +3. Take a screenshot with browser_screenshot_analyze to understand the page layout +4. Only use XPath as absolute last resort + +**When Page Interactions Fail:** +1. Check if element is visible with browser_wait_for_element +2. Scroll element into view with browser_scroll_to_element +3. Use browser_highlight_element to confirm element location +4. Try browser_execute_js for complex interactions + +### JavaScript Execution +- Use browser_execute_js for: + - Complex page state checks + - Custom scrolling behavior + - Triggering events that standard tools can't handle + - Accessing browser APIs + +### Workflow Management 📋 + +**ALWAYS start new tasks by checking for existing workflows!** + +**At the beginning of any automation task:** +1. **browser_list_workflows** - Check what workflows are already available +2. **browser_read_workflow** - If you find a relevant workflow, read it to understand the proven approach +3. 
Adapt and apply the successful patterns from existing workflows + +**When to save workflows:** +- After successfully completing a complex multi-step task +- When you discover a reliable pattern for a common website interaction +- After troubleshooting and finding working solutions for tricky elements +- Include both the successful steps AND the challenges/solutions you encountered + +**Workflow naming conventions:** +- Use descriptive names like "search_and_atc_walmart", "login_to_github", "fill_contact_form" +- Include the website domain for clarity +- Focus on the main goal/outcome + +**What to include in saved workflows:** +- Step-by-step tool usage with specific parameters +- Element discovery strategies that worked +- Common pitfalls and how to avoid them +- Alternative approaches for edge cases +- Tips for handling dynamic content + +### Performance & Best Practices +- Use appropriate timeouts for element discovery (default 10s is usually fine) +- Take screenshots strategically - not after every single action +- Use browser_wait_for_load when navigating to ensure pages are ready +- Clear highlights when done for clean visual state + +## Specialized Capabilities + +🌐 **WCAG 2.2 Level AA Compliance**: Always prioritize accessibility in element discovery +📸 **Visual Question Answering**: Use browser_screenshot_analyze for intelligent page analysis +🚀 **Semantic Web Navigation**: Prefer role-based and label-based element discovery +⚡ **Playwright Power**: Full access to modern browser automation capabilities +📋 **Workflow Management**: Save, load, and reuse automation patterns for consistency + +## Important Rules + +- **ALWAYS check for existing workflows first** - Use browser_list_workflows at the start of new tasks +- **ALWAYS use browser_initialize before any browser operations** +- **PREFER semantic locators over XPath** - they're more maintainable and accessible +- **Use visual verification for critical actions** - highlight elements and take screenshots +- **Be explicit about your reasoning** - use share_your_reasoning for complex workflows +- **Handle errors gracefully** - provide helpful debugging information +- **Follow accessibility best practices** - your automation should work for everyone +- **Document your successes** - Save working patterns with browser_save_workflow for future reuse + +Your browser automation should be reliable, maintainable, and accessible. You are a meticulous QA engineer who catches bugs before users do! 🐱✨ +""" diff --git a/code_puppy/agents/agent_security_auditor.py b/code_puppy/agents/agent_security_auditor.py new file mode 100644 index 00000000..ebb59438 --- /dev/null +++ b/code_puppy/agents/agent_security_auditor.py @@ -0,0 +1,71 @@ +"""Security audit agent.""" + +from .base_agent import BaseAgent + + +class SecurityAuditorAgent(BaseAgent): + """Security auditor agent focused on risk and compliance findings.""" + + @property + def name(self) -> str: + return "security-auditor" + + @property + def display_name(self) -> str: + return "Security Auditor 🛡️" + + @property + def description(self) -> str: + return "Risk-based security auditor delivering actionable remediation guidance" + + def get_available_tools(self) -> list[str]: + """Auditor relies on inspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are the security auditor puppy. Objective, risk-driven, compliance-savvy. 
Mix kindness with ruthless clarity so teams actually fix things. + +Audit mandate: +- Scope only the files and configs tied to security posture: auth, access control, crypto, infrastructure as code, policies, logs, pipeline guards. +- Anchor every review to the agreed standards (OWASP ASVS, CIS benchmarks, NIST, SOC2, ISO 27001, internal policies). +- Gather evidence: configs, code snippets, logs, policy docs, previous findings, remediation proof. + +Audit flow per control area: +1. Summarize the control in plain terms—what asset/process is being protected? +2. Assess design and implementation versus requirements. Note gaps, compensating controls, and residual risk. +3. Classify findings by severity (Critical → High → Medium → Low → Observations) and explain business impact. +4. Prescribe actionable remediation, including owners, tooling, and timelines. + +Focus domains: +- Access control: least privilege, RBAC/ABAC, provisioning/deprovisioning, MFA, session management, segregation of duties. +- Data protection: encryption in transit/at rest, key management, data retention/disposal, privacy controls, DLP, backups. +- Infrastructure: hardening, network segmentation, firewall rules, patch cadence, logging/monitoring, IaC drift. +- Application security: input validation, output encoding, authn/z flows, error handling, dependency hygiene, SAST/DAST results, third-party service usage. +- Cloud posture: IAM policies, security groups, storage buckets, serverless configs, managed service controls, compliance guardrails. +- Incident response: runbooks, detection coverage, escalation paths, tabletop cadence, communication templates, root cause discipline. +- Third-party & supply chain: vendor assessments, SLA clauses, data sharing agreements, SBOM, package provenance. + +Evidence & documentation: +- Record exact file paths/lines (e.g., `infra/terraform/iam.tf:42`) and attach relevant policy references. +- Note tooling outputs (semgrep, Snyk, Dependabot, SCAs), log excerpts, interview summaries. +- Flag missing artifacts (no threat model, absent runbooks) as findings. + +Reporting etiquette: +- Be concise but complete: risk description, impact, likelihood, affected assets, recommendation. +- Suggest remediation phases: immediate quick win, medium-term fix, long-term strategic guardrail. +- Call out positive controls or improvements observed—security teams deserve treats too. + +Wrap-up protocol: +- Deliver overall risk rating (“High risk”, “Moderate risk”, “Low risk”) and compliance posture summary. +- Provide remediation roadmap with priorities, owners, and success metrics. +- Highlight verification steps (retest requirements, monitoring hooks, policy updates). + +You’re the security audit persona for this CLI. Stay independent, stay constructive, and keep the whole pack safe. 
+""" diff --git a/code_puppy/agents/agent_typescript_reviewer.py b/code_puppy/agents/agent_typescript_reviewer.py new file mode 100644 index 00000000..e677ae0b --- /dev/null +++ b/code_puppy/agents/agent_typescript_reviewer.py @@ -0,0 +1,67 @@ +"""TypeScript code reviewer agent.""" + +from .base_agent import BaseAgent + + +class TypeScriptReviewerAgent(BaseAgent): + """TypeScript-focused code review agent.""" + + @property + def name(self) -> str: + return "typescript-reviewer" + + @property + def display_name(self) -> str: + return "TypeScript Reviewer 🦾" + + @property + def description(self) -> str: + return "Hyper-picky TypeScript reviewer ensuring type safety, DX, and runtime correctness" + + def get_available_tools(self) -> list[str]: + """Reviewers only need read-only inspection helpers.""" + return [ + "agent_share_your_reasoning", + "agent_run_shell_command", + "list_files", + "read_file", + "grep", + ] + + def get_system_prompt(self) -> str: + return """ +You are an elite TypeScript reviewer puppy. Keep the jokes coming, but defend type soundness, DX, and runtime sanity like it’s your chew toy. + +Mission directives: +- Review only `.ts`/`.tsx` files (and `.mts`/`.cts`) with substantive code changes. Skip untouched files or cosmetic reformatting. +- Inspect adjacent config only when it impacts TypeScript behaviour (`tsconfig.json`, `package.json`, build scripts, ESLint configs, etc.). Otherwise ignore. +- Uphold strict mode, tsconfig hygiene, and conventions from VoltAgent’s typescript-pro manifest: discriminated unions, branded types, exhaustive checks, type predicates, asm-level correctness. +- Enforce toolchain discipline: `tsc --noEmit`, `eslint --max-warnings=0`, `prettier`, `vitest`/`jest`, `ts-prune`, bundle tests, and CI parity. + +Per TypeScript file with real deltas: +1. Lead with a punchy summary of the behavioural change. +2. Enumerate findings sorted by severity (blockers → warnings → nits). Critique correctness, type system usage, framework idioms, DX, build implications, and perf. +3. Hand out praise bullets when the diff flexes—clean discriminated unions, ergonomic generics, type-safe React composition, slick tRPC bindings, reduced bundle size, etc. + +Review heuristics: +- Type system mastery: check discriminated unions, satisfies operator, branded types, conditional types, inference quality, and make sure `never` remains impossible. +- Runtime safety: ensure exhaustive switch statements, result/error return types, proper null/undefined handling, and no silent promise voids. +- Full-stack types: verify shared contracts (API clients, tRPC, GraphQL), zod/io-ts validators, and that server/client stay in sync. +- Framework idioms: React hooks stability, Next.js data fetching constraints, Angular strict DI tokens, Vue/Svelte signals typing, Node/Express request typings. +- Performance & DX: make sure tree-shaking works, no accidental `any` leaks, path aliasing resolves, lazy-loaded routes typed, and editors won’t crawl. +- Testing expectations: type-safe test doubles, fixture typing, vitest/jest coverage for tricky branches, playwright/cypress typing if included. +- Config vigilance: tsconfig targets, module resolution, project references, monorepo boundaries, and build pipeline impacts (webpack/vite/esbuild). +- Security: input validation, auth guards, CSRF/CSR token handling, SSR data leaks, and sanitization for DOM APIs. + +Feedback style: +- Be cheeky but constructive. “Consider …” or “Maybe try …” keeps the tail wagging. 
+- Group related feedback; cite precise lines like `src/components/Foo.tsx:42`. No ranges, no vibes-only feedback. +- Flag unknowns or assumptions explicitly so humans know what to double-check. +- If nothing smells funky, celebrate and spotlight strengths. + +Wrap-up protocol: +- End with repo-wide verdict: “Ship it”, “Needs fixes”, or “Mixed bag”, plus a crisp justification (type soundness, test coverage, bundle delta, etc.). +- Suggest next actions when blockers exist (add discriminated union tests, tighten generics, adjust tsconfig). Keep it practical. + +You’re the TypeScript review persona for this CLI. Be witty, ruthless about quality, and delightfully helpful. +""" diff --git a/code_puppy/agents/base_agent.py b/code_puppy/agents/base_agent.py new file mode 100644 index 00000000..d454bbc9 --- /dev/null +++ b/code_puppy/agents/base_agent.py @@ -0,0 +1,1056 @@ +"""Base agent configuration class for defining agent properties.""" + +import asyncio +import json +import math +import signal +import uuid +from abc import ABC, abstractmethod +from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Union + +import mcp +import pydantic +import pydantic_ai.models +from pydantic_ai import Agent as PydanticAgent +from pydantic_ai import BinaryContent, DocumentUrl, ImageUrl +from pydantic_ai import RunContext, UsageLimitExceeded, UsageLimits +from pydantic_ai.messages import ( + ModelMessage, + ModelRequest, + TextPart, + ToolCallPart, + ToolCallPartDelta, + ToolReturn, + ToolReturnPart, +) +from pydantic_ai.models.openai import OpenAIChatModelSettings +from pydantic_ai.settings import ModelSettings + +# Consolidated relative imports +from code_puppy.config import ( + get_agent_pinned_model, + get_compaction_strategy, + get_compaction_threshold, + get_global_model_name, + get_openai_reasoning_effort, + get_protected_token_count, + get_value, + load_mcp_server_configs, + get_message_limit, +) +from code_puppy.mcp_ import ServerConfig, get_mcp_manager +from code_puppy.messaging import ( + emit_error, + emit_info, + emit_system_message, + emit_warning, +) +from code_puppy.messaging.spinner import ( + SpinnerBase, + update_spinner_context, +) +from code_puppy.model_factory import ModelFactory +from code_puppy.summarization_agent import run_summarization_sync +from code_puppy.tools.common import console + + +class BaseAgent(ABC): + """Base class for all agent configurations.""" + + def __init__(self): + self.id = str(uuid.uuid4()) + self._message_history: List[Any] = [] + self._compacted_message_hashes: Set[str] = set() + # Agent construction cache + self._code_generation_agent = None + self._last_model_name: Optional[str] = None + # Puppy rules loaded lazily + self._puppy_rules: Optional[str] = None + self.cur_model: pydantic_ai.models.Model + + @property + @abstractmethod + def name(self) -> str: + """Unique identifier for the agent.""" + pass + + @property + @abstractmethod + def display_name(self) -> str: + """Human-readable name for the agent.""" + pass + + @property + @abstractmethod + def description(self) -> str: + """Brief description of what this agent does.""" + pass + + @abstractmethod + def get_system_prompt(self) -> str: + """Get the system prompt for this agent.""" + pass + + @abstractmethod + def get_available_tools(self) -> List[str]: + """Get list of tool names that this agent should have access to. + + Returns: + List of tool names to register for this agent. 
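+
+        Example (illustrative; tool names taken from the bundled reviewer agents):
+            ["agent_share_your_reasoning", "read_file", "grep", "list_files"]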
+ """ + pass + + def get_tools_config(self) -> Optional[Dict[str, Any]]: + """Get tool configuration for this agent. + + Returns: + Dict with tool configuration, or None to use default tools. + """ + return None + + def get_user_prompt(self) -> Optional[str]: + """Get custom user prompt for this agent. + + Returns: + Custom prompt string, or None to use default. + """ + return None + + # Message history management methods + def get_message_history(self) -> List[Any]: + """Get the message history for this agent. + + Returns: + List of messages in this agent's conversation history. + """ + return self._message_history + + def set_message_history(self, history: List[Any]) -> None: + """Set the message history for this agent. + + Args: + history: List of messages to set as the conversation history. + """ + self._message_history = history + + def clear_message_history(self) -> None: + """Clear the message history for this agent.""" + self._message_history = [] + self._compacted_message_hashes.clear() + + def append_to_message_history(self, message: Any) -> None: + """Append a message to this agent's history. + + Args: + message: Message to append to the conversation history. + """ + self._message_history.append(message) + + def extend_message_history(self, history: List[Any]) -> None: + """Extend this agent's message history with multiple messages. + + Args: + history: List of messages to append to the conversation history. + """ + self._message_history.extend(history) + + def get_compacted_message_hashes(self) -> Set[str]: + """Get the set of compacted message hashes for this agent. + + Returns: + Set of hashes for messages that have been compacted/summarized. + """ + return self._compacted_message_hashes + + def add_compacted_message_hash(self, message_hash: str) -> None: + """Add a message hash to the set of compacted message hashes. + + Args: + message_hash: Hash of a message that has been compacted/summarized. + """ + self._compacted_message_hashes.add(message_hash) + + def get_model_name(self) -> Optional[str]: + """Get pinned model name for this agent, if specified. + + Returns: + Model name to use for this agent, or global default if none pinned. + """ + pinned = get_agent_pinned_model(self.name) + if pinned == "" or pinned is None: + return get_global_model_name() + return pinned + + def _clean_binaries(self, messages: List[ModelMessage]) -> List[ModelMessage]: + cleaned = [] + for message in messages: + parts = [] + for part in message.parts: + if hasattr(part, "content") and isinstance(part.content, list): + content = [] + for item in part.content: + if not isinstance(item, BinaryContent): + content.append(item) + part.content = content + parts.append(part) + cleaned.append(message) + return cleaned + + # Message history processing methods (moved from state_management.py and message_history_processor.py) + def _stringify_part(self, part: Any) -> str: + """Create a stable string representation for a message part. + + We deliberately ignore timestamps so identical content hashes the same even when + emitted at different times. 
This prevents status updates from blowing up the + history when they are repeated with new timestamps.""" + + attributes: List[str] = [part.__class__.__name__] + + # Role/instructions help disambiguate parts that otherwise share content + if hasattr(part, "role") and part.role: + attributes.append(f"role={part.role}") + if hasattr(part, "instructions") and part.instructions: + attributes.append(f"instructions={part.instructions}") + + if hasattr(part, "tool_call_id") and part.tool_call_id: + attributes.append(f"tool_call_id={part.tool_call_id}") + + if hasattr(part, "tool_name") and part.tool_name: + attributes.append(f"tool_name={part.tool_name}") + + content = getattr(part, "content", None) + if content is None: + attributes.append("content=None") + elif isinstance(content, str): + attributes.append(f"content={content}") + elif isinstance(content, pydantic.BaseModel): + attributes.append( + f"content={json.dumps(content.model_dump(), sort_keys=True)}" + ) + elif isinstance(content, dict): + attributes.append(f"content={json.dumps(content, sort_keys=True)}") + elif isinstance(content, list): + for item in content: + if isinstance(item, str): + attributes.append(f"content={item}") + if isinstance(item, BinaryContent): + attributes.append(f"BinaryContent={hash(item.data)}") + else: + attributes.append(f"content={repr(content)}") + result = "|".join(attributes) + return result + + def hash_message(self, message: Any) -> int: + """Create a stable hash for a model message that ignores timestamps.""" + role = getattr(message, "role", None) + instructions = getattr(message, "instructions", None) + header_bits: List[str] = [] + if role: + header_bits.append(f"role={role}") + if instructions: + header_bits.append(f"instructions={instructions}") + + part_strings = [ + self._stringify_part(part) for part in getattr(message, "parts", []) + ] + canonical = "||".join(header_bits + part_strings) + return hash(canonical) + + def stringify_message_part(self, part) -> str: + """ + Convert a message part to a string representation for token estimation or other uses. + + Args: + part: A message part that may contain content or be a tool call + + Returns: + String representation of the message part + """ + result = "" + if hasattr(part, "part_kind"): + result += part.part_kind + ": " + else: + result += str(type(part)) + ": " + + # Handle content + if hasattr(part, "content") and part.content: + # Handle different content types + if isinstance(part.content, str): + result = part.content + elif isinstance(part.content, pydantic.BaseModel): + result = json.dumps(part.content.model_dump()) + elif isinstance(part.content, dict): + result = json.dumps(part.content) + elif isinstance(part.content, list): + result = "" + for item in part.content: + if isinstance(item, str): + result += item + "\n" + if isinstance(item, BinaryContent): + result += f"BinaryContent={hash(item.data)}\n" + else: + result = str(part.content) + + # Handle tool calls which may have additional token costs + # If part also has content, we'll process tool calls separately + if hasattr(part, "tool_name") and part.tool_name: + # Estimate tokens for tool name and parameters + tool_text = part.tool_name + if hasattr(part, "args"): + tool_text += f" {str(part.args)}" + result += tool_text + + return result + + def estimate_token_count(self, text: str) -> int: + """ + Simple token estimation using len(message) / 3. + This replaces tiktoken with a much simpler approach. 
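+
+        Worked example (illustrative): a 30-character string estimates to
+        max(1, floor(30 / 3)) = 10 tokens; an empty string still counts as 1.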
+ """ + return max(1, math.floor((len(text) / 3))) + + def estimate_tokens_for_message(self, message: ModelMessage) -> int: + """ + Estimate the number of tokens in a message using len(message) + Simple and fast replacement for tiktoken. + """ + total_tokens = 0 + + for part in message.parts: + part_str = self.stringify_message_part(part) + if part_str: + total_tokens += self.estimate_token_count(part_str) + + return max(1, total_tokens) + + def _is_tool_call_part(self, part: Any) -> bool: + if isinstance(part, (ToolCallPart, ToolCallPartDelta)): + return True + + part_kind = (getattr(part, "part_kind", "") or "").replace("_", "-") + if part_kind == "tool-call": + return True + + has_tool_name = getattr(part, "tool_name", None) is not None + has_args = getattr(part, "args", None) is not None + has_args_delta = getattr(part, "args_delta", None) is not None + + return bool(has_tool_name and (has_args or has_args_delta)) + + def _is_tool_return_part(self, part: Any) -> bool: + if isinstance(part, (ToolReturnPart, ToolReturn)): + return True + + part_kind = (getattr(part, "part_kind", "") or "").replace("_", "-") + if part_kind in {"tool-return", "tool-result"}: + return True + + if getattr(part, "tool_call_id", None) is None: + return False + + has_content = getattr(part, "content", None) is not None + has_content_delta = getattr(part, "content_delta", None) is not None + return bool(has_content or has_content_delta) + + def filter_huge_messages(self, messages: List[ModelMessage]) -> List[ModelMessage]: + filtered = [m for m in messages if self.estimate_tokens_for_message(m) < 50000] + pruned = self.prune_interrupted_tool_calls(filtered) + return pruned + + def split_messages_for_protected_summarization( + self, + messages: List[ModelMessage], + ) -> Tuple[List[ModelMessage], List[ModelMessage]]: + """ + Split messages into two groups: messages to summarize and protected recent messages. + + Returns: + Tuple of (messages_to_summarize, protected_messages) + + The protected_messages are the most recent messages that total up to the configured protected token count. + The system message (first message) is always protected. + All other messages that don't fit in the protected zone will be summarized. + """ + if len(messages) <= 1: # Just system message or empty + return [], messages + + # Always protect the system message (first message) + system_message = messages[0] + system_tokens = self.estimate_tokens_for_message(system_message) + + if len(messages) == 1: + return [], messages + + # Get the configured protected token count + protected_tokens_limit = get_protected_token_count() + + # Calculate tokens for messages from most recent backwards (excluding system message) + protected_messages = [] + protected_token_count = system_tokens # Start with system message tokens + + # Go backwards through non-system messages to find protected zone + for i in range( + len(messages) - 1, 0, -1 + ): # Stop at 1, not 0 (skip system message) + message = messages[i] + message_tokens = self.estimate_tokens_for_message(message) + + # If adding this message would exceed protected tokens, stop here + if protected_token_count + message_tokens > protected_tokens_limit: + break + + protected_messages.append(message) + protected_token_count += message_tokens + + # Messages that were added while scanning backwards are currently in reverse order. + # Reverse them to restore chronological ordering, then prepend the system prompt. 
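+        # Illustrative walk-through (hypothetical history): with messages
+        # [system, m1, m2, m3, m4] and a protected budget that only fits m3 and m4,
+        # the backward scan above collects [m4, m3]; the reverse + insert below give
+        # protected_messages == [system, m3, m4], and messages_to_summarize becomes
+        # [m1, m2] via the slice computed further down.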
+ protected_messages.reverse() + protected_messages.insert(0, system_message) + + # Messages to summarize are everything between the system message and the + # protected tail zone we just constructed. + protected_start_idx = max(1, len(messages) - (len(protected_messages) - 1)) + messages_to_summarize = messages[1:protected_start_idx] + + # Emit info messages + emit_info( + f"🔒 Protecting {len(protected_messages)} recent messages ({protected_token_count} tokens, limit: {protected_tokens_limit})" + ) + emit_info(f"📝 Summarizing {len(messages_to_summarize)} older messages") + + return messages_to_summarize, protected_messages + + def summarize_messages( + self, messages: List[ModelMessage], with_protection: bool = True + ) -> Tuple[List[ModelMessage], List[ModelMessage]]: + """ + Summarize messages while protecting recent messages up to PROTECTED_TOKENS. + + Returns: + Tuple of (compacted_messages, summarized_source_messages) + where compacted_messages always preserves the original system message + as the first entry. + """ + messages_to_summarize: List[ModelMessage] + protected_messages: List[ModelMessage] + + if with_protection: + messages_to_summarize, protected_messages = ( + self.split_messages_for_protected_summarization(messages) + ) + else: + messages_to_summarize = messages[1:] if messages else [] + protected_messages = messages[:1] + + if not messages: + return [], [] + + system_message = messages[0] + + if not messages_to_summarize: + # Nothing to summarize, so just return the original sequence + return self.prune_interrupted_tool_calls(messages), [] + + instructions = ( + "The input will be a log of Agentic AI steps that have been taken" + " as well as user queries, etc. Summarize the contents of these steps." + " The high level details should remain but the bulk of the content from tool-call" + " responses should be compacted and summarized. For example if you see a tool-call" + " reading a file, and the file contents are large, then in your summary you might just" + " write: * used read_file on space_invaders.cpp - contents removed." + "\n Make sure your result is a bulleted list of all steps and interactions." + "\n\nNOTE: This summary represents older conversation history. Recent messages are preserved separately." + ) + + try: + new_messages = run_summarization_sync( + instructions, message_history=messages_to_summarize + ) + + if not isinstance(new_messages, list): + emit_warning( + "Summarization agent returned non-list output; wrapping into message request" + ) + new_messages = [ModelRequest([TextPart(str(new_messages))])] + + compacted: List[ModelMessage] = [system_message] + list(new_messages) + + # Drop the system message from protected_messages because we already included it + protected_tail = [ + msg for msg in protected_messages if msg is not system_message + ] + + compacted.extend(protected_tail) + + return self.prune_interrupted_tool_calls(compacted), messages_to_summarize + except Exception as e: + emit_error(f"Summarization failed during compaction: {e}") + return messages, [] # Return original messages on failure + + def get_model_context_length(self) -> int: + """ + Return the context length for this agent's effective model. + + Honors per-agent pinned model via `self.get_model_name()`; falls back + to global model when no pin is set. Defaults conservatively on failure. 
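+
+        Example (illustrative): if the loaded model config maps the effective
+        model to {"context_length": 200000}, this returns 200000; any lookup
+        failure falls back to the conservative 128000 default below.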
+ """ + try: + model_configs = ModelFactory.load_config() + # Use the agent's effective model (respects /pin_model) + model_name = self.get_model_name() + model_config = model_configs.get(model_name, {}) + context_length = model_config.get("context_length", 128000) + return int(context_length) + except Exception: + # Be safe; don't blow up status/compaction if model lookup fails + return 128000 + + def prune_interrupted_tool_calls( + self, messages: List[ModelMessage] + ) -> List[ModelMessage]: + """ + Remove any messages that participate in mismatched tool call sequences. + + A mismatched tool call id is one that appears in a ToolCall (model/tool request) + without a corresponding tool return, or vice versa. We preserve original order + and only drop messages that contain parts referencing mismatched tool_call_ids. + """ + if not messages: + return messages + + tool_call_ids: Set[str] = set() + tool_return_ids: Set[str] = set() + + # First pass: collect ids for calls vs returns + for msg in messages: + for part in getattr(msg, "parts", []) or []: + tool_call_id = getattr(part, "tool_call_id", None) + if not tool_call_id: + continue + # Heuristic: if it's an explicit ToolCallPart or has a tool_name/args, + # consider it a call; otherwise it's a return/result. + if part.part_kind == "tool-call": + tool_call_ids.add(tool_call_id) + else: + tool_return_ids.add(tool_call_id) + + mismatched: Set[str] = tool_call_ids.symmetric_difference(tool_return_ids) + if not mismatched: + return messages + + pruned: List[ModelMessage] = [] + dropped_count = 0 + for msg in messages: + has_mismatched = False + for part in getattr(msg, "parts", []) or []: + tcid = getattr(part, "tool_call_id", None) + if tcid and tcid in mismatched: + has_mismatched = True + break + if has_mismatched: + dropped_count += 1 + continue + pruned.append(msg) + return pruned + + def message_history_processor( + self, ctx: RunContext, messages: List[ModelMessage] + ) -> List[ModelMessage]: + # First, prune any interrupted/mismatched tool-call conversations + model_max = self.get_model_context_length() + + total_current_tokens = sum( + self.estimate_tokens_for_message(msg) for msg in messages + ) + proportion_used = total_current_tokens / model_max + + # Check if we're in TUI mode and can update the status bar + from code_puppy.tui_state import get_tui_app_instance, is_tui_mode + + context_summary = SpinnerBase.format_context_info( + total_current_tokens, model_max, proportion_used + ) + update_spinner_context(context_summary) + + if is_tui_mode(): + tui_app = get_tui_app_instance() + if tui_app: + try: + # Update the status bar instead of emitting a chat message + status_bar = tui_app.query_one("StatusBar") + status_bar.update_token_info( + total_current_tokens, model_max, proportion_used + ) + except Exception as e: + emit_error(e) + else: + emit_info( + f"Final token count after processing: {total_current_tokens}", + message_group="token_context_status", + ) + # Get the configured compaction threshold + compaction_threshold = get_compaction_threshold() + + # Get the configured compaction strategy + compaction_strategy = get_compaction_strategy() + + if proportion_used > compaction_threshold: + if compaction_strategy == "truncation": + # Use truncation instead of summarization + protected_tokens = get_protected_token_count() + result_messages = self.truncation( + self.filter_huge_messages(messages), protected_tokens + ) + summarized_messages = [] # No summarization in truncation mode + else: + # Default to summarization + 
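+                # summarize_messages returns (compacted history, the source messages
+                # that were summarized); the latter are hashed further down so the
+                # accumulator can skip re-adding them later.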
result_messages, summarized_messages = self.summarize_messages( + self.filter_huge_messages(messages) + ) + + final_token_count = sum( + self.estimate_tokens_for_message(msg) for msg in result_messages + ) + # Update status bar with final token count if in TUI mode + final_summary = SpinnerBase.format_context_info( + final_token_count, model_max, final_token_count / model_max + ) + update_spinner_context(final_summary) + + if is_tui_mode(): + tui_app = get_tui_app_instance() + if tui_app: + try: + status_bar = tui_app.query_one("StatusBar") + status_bar.update_token_info( + final_token_count, model_max, final_token_count / model_max + ) + except Exception: + emit_info( + f"Final token count after processing: {final_token_count}", + message_group="token_context_status", + ) + else: + emit_info( + f"Final token count after processing: {final_token_count}", + message_group="token_context_status", + ) + + self.set_message_history(result_messages) + for m in summarized_messages: + self.add_compacted_message_hash(self.hash_message(m)) + return result_messages + return messages + + def truncation( + self, messages: List[ModelMessage], protected_tokens: int + ) -> List[ModelMessage]: + """ + Truncate message history to manage token usage. + + Args: + messages: List of messages to truncate + protected_tokens: Number of tokens to protect + + Returns: + Truncated list of messages + """ + import queue + + emit_info("Truncating message history to manage token usage") + result = [messages[0]] # Always keep the first message (system prompt) + num_tokens = 0 + stack = queue.LifoQueue() + + # Put messages in reverse order (most recent first) into the stack + # but break when we exceed protected_tokens + for idx, msg in enumerate(reversed(messages[1:])): # Skip the first message + num_tokens += self.estimate_tokens_for_message(msg) + if num_tokens > protected_tokens: + break + stack.put(msg) + + # Pop messages from stack to get them in chronological order + while not stack.empty(): + result.append(stack.get()) + + result = self.prune_interrupted_tool_calls(result) + return result + + def run_summarization_sync( + self, + instructions: str, + message_history: List[ModelMessage], + ) -> Union[List[ModelMessage], str]: + """ + Run summarization synchronously using the configured summarization agent. + This is exposed as a method so it can be overridden by subclasses if needed. 
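+        (This default implementation simply delegates to the module-level
+        run_summarization_sync helper from code_puppy.summarization_agent.)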
+ + Args: + instructions: Instructions for the summarization agent + message_history: List of messages to summarize + + Returns: + Summarized messages or text + """ + return run_summarization_sync(instructions, message_history) + + # ===== Agent wiring formerly in code_puppy/agent.py ===== + def load_puppy_rules(self) -> Optional[str]: + """Load AGENT(S).md if present and cache the contents.""" + if self._puppy_rules is not None: + return self._puppy_rules + from pathlib import Path + + possible_paths = ["AGENTS.md", "AGENT.md", "agents.md", "agent.md"] + for path_str in possible_paths: + puppy_rules_path = Path(path_str) + if puppy_rules_path.exists(): + with open(puppy_rules_path, "r") as f: + self._puppy_rules = f.read() + break + return self._puppy_rules + + def load_mcp_servers(self, extra_headers: Optional[Dict[str, str]] = None): + """Load MCP servers through the manager and return pydantic-ai compatible servers.""" + + mcp_disabled = get_value("disable_mcp_servers") + if mcp_disabled and str(mcp_disabled).lower() in ("1", "true", "yes", "on"): + emit_system_message("[dim]MCP servers disabled via config[/dim]") + return [] + + manager = get_mcp_manager() + configs = load_mcp_server_configs() + if not configs: + existing_servers = manager.list_servers() + if not existing_servers: + emit_system_message("[dim]No MCP servers configured[/dim]") + return [] + else: + for name, conf in configs.items(): + try: + server_config = ServerConfig( + id=conf.get("id", f"{name}_{hash(name)}"), + name=name, + type=conf.get("type", "sse"), + enabled=conf.get("enabled", True), + config=conf, + ) + existing = manager.get_server_by_name(name) + if not existing: + manager.register_server(server_config) + emit_system_message(f"[dim]Registered MCP server: {name}[/dim]") + else: + if existing.config != server_config.config: + manager.update_server(existing.id, server_config) + emit_system_message( + f"[dim]Updated MCP server: {name}[/dim]" + ) + except Exception as e: + emit_error(f"Failed to register MCP server '{name}': {str(e)}") + continue + + servers = manager.get_servers_for_agent() + if servers: + emit_system_message( + f"[green]Successfully loaded {len(servers)} MCP server(s)[/green]" + ) + # Stay silent when there are no servers configured/available + return servers + + def reload_mcp_servers(self): + """Reload MCP servers and return updated servers.""" + self.load_mcp_servers() + manager = get_mcp_manager() + return manager.get_servers_for_agent() + + def _load_model_with_fallback( + self, + requested_model_name: str, + models_config: Dict[str, Any], + message_group: str, + ) -> Tuple[Any, str]: + """Load the requested model, applying a friendly fallback when unavailable.""" + try: + model = ModelFactory.get_model(requested_model_name, models_config) + return model, requested_model_name + except ValueError as exc: + available_models = list(models_config.keys()) + available_str = ( + ", ".join(sorted(available_models)) + if available_models + else "no configured models" + ) + emit_warning( + ( + f"[yellow]Model '{requested_model_name}' not found. 
" + f"Available models: {available_str}[/yellow]" + ), + message_group=message_group, + ) + + fallback_candidates: List[str] = [] + global_candidate = get_global_model_name() + if global_candidate: + fallback_candidates.append(global_candidate) + + for candidate in available_models: + if candidate not in fallback_candidates: + fallback_candidates.append(candidate) + + for candidate in fallback_candidates: + if not candidate or candidate == requested_model_name: + continue + try: + model = ModelFactory.get_model(candidate, models_config) + emit_info( + f"[bold cyan]Using fallback model: {candidate}[/bold cyan]", + message_group=message_group, + ) + return model, candidate + except ValueError: + continue + + friendly_message = ( + "No valid model could be loaded. Update the model configuration or set " + "a valid model with `config set`." + ) + emit_error( + f"[bold red]{friendly_message}[/bold red]", + message_group=message_group, + ) + raise ValueError(friendly_message) from exc + + def reload_code_generation_agent(self, message_group: Optional[str] = None): + """Force-reload the pydantic-ai Agent based on current config and model.""" + from code_puppy.tools import register_tools_for_agent + + if message_group is None: + message_group = str(uuid.uuid4()) + + model_name = self.get_model_name() + + emit_info( + f"[bold cyan]Loading Model: {model_name}[/bold cyan]", + message_group=message_group, + ) + models_config = ModelFactory.load_config() + model, resolved_model_name = self._load_model_with_fallback( + model_name, + models_config, + message_group, + ) + + emit_info( + f"[bold magenta]Loading Agent: {self.name}[/bold magenta]", + message_group=message_group, + ) + + instructions = self.get_system_prompt() + puppy_rules = self.load_puppy_rules() + if puppy_rules: + instructions += f"\n{puppy_rules}" + + mcp_servers = self.load_mcp_servers() + + model_settings_dict: Dict[str, Any] = {"seed": 42} + output_tokens = max( + 2048, + min(int(0.05 * self.get_model_context_length()) - 1024, 16384), + ) + console.print(f"Max output tokens per message: {output_tokens}") + model_settings_dict["max_tokens"] = output_tokens + + model_settings: ModelSettings = ModelSettings(**model_settings_dict) + if "gpt-5" in model_name: + model_settings_dict["openai_reasoning_effort"] = ( + get_openai_reasoning_effort() + ) + model_settings_dict["extra_body"] = {"verbosity": "low"} + model_settings = OpenAIChatModelSettings(**model_settings_dict) + + self.cur_model = model + p_agent = PydanticAgent( + model=model, + instructions=instructions, + output_type=str, + retries=3, + mcp_servers=mcp_servers, + history_processors=[self.message_history_accumulator], + model_settings=model_settings, + ) + + agent_tools = self.get_available_tools() + register_tools_for_agent(p_agent, agent_tools) + + self._code_generation_agent = p_agent + self._last_model_name = resolved_model_name + # expose for run_with_mcp + self.pydantic_agent = p_agent + return self._code_generation_agent + + def message_history_accumulator(self, ctx: RunContext, messages: List[Any]): + _message_history = self.get_message_history() + message_history_hashes = set([self.hash_message(m) for m in _message_history]) + for msg in messages: + if ( + self.hash_message(msg) not in message_history_hashes + and self.hash_message(msg) not in self.get_compacted_message_hashes() + ): + _message_history.append(msg) + + # Apply message history trimming using the main processor + # This ensures we maintain global state while still managing context limits + 
self.message_history_processor(ctx, _message_history) + return self.get_message_history() + + async def run_with_mcp( + self, + prompt: str, + *, + attachments: Optional[Sequence[BinaryContent]] = None, + link_attachments: Optional[Sequence[Union[ImageUrl, DocumentUrl]]] = None, + **kwargs, + ) -> Any: + """Run the agent with MCP servers, attachments, and full cancellation support. + + Args: + prompt: Primary user prompt text (may be empty when attachments present). + attachments: Local binary payloads (e.g., dragged images) to include. + link_attachments: Remote assets (image/document URLs) to include. + **kwargs: Additional arguments forwarded to `pydantic_ai.Agent.run`. + + Returns: + The agent's response. + + Raises: + asyncio.CancelledError: When execution is cancelled by user. + """ + group_id = str(uuid.uuid4()) + # Avoid double-loading: reuse existing agent if already built + pydantic_agent = self._code_generation_agent or self.reload_code_generation_agent() + + # Build combined prompt payload when attachments are provided. + attachment_parts: List[Any] = [] + if attachments: + attachment_parts.extend(list(attachments)) + if link_attachments: + attachment_parts.extend(list(link_attachments)) + + if attachment_parts: + prompt_payload: Union[str, List[Any]] = [] + if prompt: + prompt_payload.append(prompt) + prompt_payload.extend(attachment_parts) + else: + prompt_payload = prompt + + async def run_agent_task(): + try: + self.set_message_history( + self.prune_interrupted_tool_calls(self.get_message_history()) + ) + usage_limits = UsageLimits(request_limit=get_message_limit()) + result_ = await pydantic_agent.run( + prompt_payload, + message_history=self.get_message_history(), + usage_limits=usage_limits, + **kwargs, + ) + return result_ + except* UsageLimitExceeded as ule: + emit_info(f"Usage limit exceeded: {str(ule)}", group_id=group_id) + emit_info( + "The agent has reached its usage limit. 
You can ask it to continue by saying 'please continue' or similar.", + group_id=group_id, + ) + except* mcp.shared.exceptions.McpError as mcp_error: + emit_info(f"MCP server error: {str(mcp_error)}", group_id=group_id) + emit_info(f"{str(mcp_error)}", group_id=group_id) + emit_info( + "Try disabling any malfunctioning MCP servers", group_id=group_id + ) + except* asyncio.exceptions.CancelledError: + emit_info("Cancelled") + except* InterruptedError as ie: + emit_info(f"Interrupted: {str(ie)}") + except* Exception as other_error: + # Filter out CancelledError and UsageLimitExceeded from the exception group - let it propagate + remaining_exceptions = [] + + def collect_non_cancelled_exceptions(exc): + if isinstance(exc, ExceptionGroup): + for sub_exc in exc.exceptions: + collect_non_cancelled_exceptions(sub_exc) + elif not isinstance( + exc, (asyncio.CancelledError, UsageLimitExceeded) + ): + remaining_exceptions.append(exc) + emit_info(f"Unexpected error: {str(exc)}", group_id=group_id) + emit_info(f"{str(exc.args)}", group_id=group_id) + + collect_non_cancelled_exceptions(other_error) + + # If there are CancelledError exceptions in the group, re-raise them + cancelled_exceptions = [] + + def collect_cancelled_exceptions(exc): + if isinstance(exc, ExceptionGroup): + for sub_exc in exc.exceptions: + collect_cancelled_exceptions(sub_exc) + elif isinstance(exc, asyncio.CancelledError): + cancelled_exceptions.append(exc) + + collect_cancelled_exceptions(other_error) + finally: + self.set_message_history( + self.prune_interrupted_tool_calls(self.get_message_history()) + ) + + # Create the task FIRST + agent_task = asyncio.create_task(run_agent_task()) + + # Import shell process killer + from code_puppy.tools.command_runner import kill_all_running_shell_processes + + # Ensure the interrupt handler only acts once per task + def keyboard_interrupt_handler(sig, frame): + """Signal handler for Ctrl+C - replicating exact original logic""" + + # First, nuke any running shell processes triggered by tools + try: + killed = kill_all_running_shell_processes() + if killed: + emit_info(f"Cancelled {killed} running shell process(es).") + else: + # Only cancel the agent task if no shell processes were killed + if not agent_task.done(): + agent_task.cancel() + except Exception as e: + emit_info(f"Shell kill error: {e}") + if not agent_task.done(): + agent_task.cancel() + # Don't call the original handler + # This prevents the application from exiting + + try: + # Save original handler and set our custom one AFTER task is created + original_handler = signal.signal(signal.SIGINT, keyboard_interrupt_handler) + + # Wait for the task to complete or be cancelled + result = await agent_task + return result + except asyncio.CancelledError: + agent_task.cancel() + except KeyboardInterrupt: + # Handle direct keyboard interrupt during await + if not agent_task.done(): + agent_task.cancel() + try: + await agent_task + except asyncio.CancelledError: + pass + finally: + # Restore original signal handler + if original_handler: + signal.signal(signal.SIGINT, original_handler) diff --git a/code_puppy/agents/json_agent.py b/code_puppy/agents/json_agent.py new file mode 100644 index 00000000..62c8ff1b --- /dev/null +++ b/code_puppy/agents/json_agent.py @@ -0,0 +1,148 @@ +"""JSON-based agent configuration system.""" + +import json +from pathlib import Path +from typing import Dict, List, Optional + +from .base_agent import BaseAgent + + +class JSONAgent(BaseAgent): + """Agent configured from a JSON file.""" + + def __init__(self, 
json_path: str): + """Initialize agent from JSON file. + + Args: + json_path: Path to the JSON configuration file. + """ + super().__init__() + self.json_path = json_path + self._config = self._load_config() + self._validate_config() + + def _load_config(self) -> Dict: + """Load configuration from JSON file.""" + try: + with open(self.json_path, "r", encoding="utf-8") as f: + return json.load(f) + except (json.JSONDecodeError, FileNotFoundError) as e: + raise ValueError( + f"Failed to load JSON agent config from {self.json_path}: {e}" + ) + + def _validate_config(self) -> None: + """Validate required fields in configuration.""" + required_fields = ["name", "description", "system_prompt", "tools"] + for field in required_fields: + if field not in self._config: + raise ValueError( + f"Missing required field '{field}' in JSON agent config: {self.json_path}" + ) + + # Validate tools is a list + if not isinstance(self._config["tools"], list): + raise ValueError( + f"'tools' must be a list in JSON agent config: {self.json_path}" + ) + + # Validate system_prompt is string or list + system_prompt = self._config["system_prompt"] + if not isinstance(system_prompt, (str, list)): + raise ValueError( + f"'system_prompt' must be a string or list in JSON agent config: {self.json_path}" + ) + + @property + def name(self) -> str: + """Get agent name from JSON config.""" + return self._config["name"] + + @property + def display_name(self) -> str: + """Get display name from JSON config, fallback to name with emoji.""" + return self._config.get("display_name", f"{self.name.title()} 🤖") + + @property + def description(self) -> str: + """Get description from JSON config.""" + return self._config["description"] + + def get_system_prompt(self) -> str: + """Get system prompt from JSON config.""" + system_prompt = self._config["system_prompt"] + + # If it's a list, join with newlines + if isinstance(system_prompt, list): + return "\n".join(system_prompt) + + return system_prompt + + def get_available_tools(self) -> List[str]: + """Get available tools from JSON config.""" + # Filter out any tools that don't exist in our registry + from code_puppy.tools import get_available_tool_names + + available_tools = get_available_tool_names() + + # Only return tools that are both requested and available + # Also filter out 'final_result' which is not in our registry + requested_tools = [ + tool for tool in self._config["tools"] if tool in available_tools + ] + + return requested_tools + + def get_user_prompt(self) -> Optional[str]: + """Get custom user prompt from JSON config.""" + return self._config.get("user_prompt") + + def get_tools_config(self) -> Optional[Dict]: + """Get tool configuration from JSON config.""" + return self._config.get("tools_config") + + def refresh_config(self) -> None: + """Reload the agent configuration from disk. + + This keeps long-lived agent instances in sync after external edits. + """ + self._config = self._load_config() + self._validate_config() + + def get_model_name(self) -> Optional[str]: + """Get pinned model name from JSON config, if specified. + + Returns: + Model name to use for this agent, or None to use global default. + """ + result = self._config.get("model") + if result is None: + result = super().get_model_name() + return result + + +def discover_json_agents() -> Dict[str, str]: + """Discover JSON agent files in the user's agents directory. + + Returns: + Dict mapping agent names to their JSON file paths. 
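+
+    A discoverable file is any *.json in that directory that passes JSONAgent
+    validation, i.e. it defines "name", "description", "system_prompt" and
+    "tools". Minimal illustrative file (hypothetical agent name):
+
+        {"name": "docs-helper",
+         "description": "Answers questions about project docs",
+         "system_prompt": "You are a documentation helper.",
+         "tools": ["read_file", "grep", "list_files"]}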
+ """ + from code_puppy.config import get_user_agents_directory + + agents = {} + agents_dir = Path(get_user_agents_directory()) + + if not agents_dir.exists() or not agents_dir.is_dir(): + return agents + + # Find all .json files in the agents directory + for json_file in agents_dir.glob("*.json"): + try: + # Try to load and validate the agent + agent = JSONAgent(str(json_file)) + agents[agent.name] = str(json_file) + except Exception: + # Skip invalid JSON agent files + continue + + return agents diff --git a/code_puppy/callbacks.py b/code_puppy/callbacks.py new file mode 100644 index 00000000..8587792c --- /dev/null +++ b/code_puppy/callbacks.py @@ -0,0 +1,208 @@ +import asyncio +import logging +import traceback +from typing import Any, Callable, Dict, List, Literal, Optional + +PhaseType = Literal[ + "startup", + "shutdown", + "invoke_agent", + "agent_exception", + "version_check", + "edit_file", + "delete_file", + "run_shell_command", + "load_model_config", + "load_prompt", + "agent_reload", + "custom_command", + "custom_command_help", +] +CallbackFunc = Callable[..., Any] + +_callbacks: Dict[PhaseType, List[CallbackFunc]] = { + "startup": [], + "shutdown": [], + "invoke_agent": [], + "agent_exception": [], + "version_check": [], + "edit_file": [], + "delete_file": [], + "run_shell_command": [], + "load_model_config": [], + "load_prompt": [], + "agent_reload": [], + "custom_command": [], + "custom_command_help": [], +} + +logger = logging.getLogger(__name__) + + +def register_callback(phase: PhaseType, func: CallbackFunc) -> None: + if phase not in _callbacks: + raise ValueError( + f"Unsupported phase: {phase}. Supported phases: {list(_callbacks.keys())}" + ) + + if not callable(func): + raise TypeError(f"Callback must be callable, got {type(func)}") + + _callbacks[phase].append(func) + logger.debug(f"Registered async callback {func.__name__} for phase '{phase}'") + + +def unregister_callback(phase: PhaseType, func: CallbackFunc) -> bool: + if phase not in _callbacks: + return False + + try: + _callbacks[phase].remove(func) + logger.debug( + f"Unregistered async callback {func.__name__} from phase '{phase}'" + ) + return True + except ValueError: + return False + + +def clear_callbacks(phase: Optional[PhaseType] = None) -> None: + if phase is None: + for p in _callbacks: + _callbacks[p].clear() + logger.debug("Cleared all async callbacks") + else: + if phase in _callbacks: + _callbacks[phase].clear() + logger.debug(f"Cleared async callbacks for phase '{phase}'") + + +def get_callbacks(phase: PhaseType) -> List[CallbackFunc]: + return _callbacks.get(phase, []).copy() + + +def count_callbacks(phase: Optional[PhaseType] = None) -> int: + if phase is None: + return sum(len(callbacks) for callbacks in _callbacks.values()) + return len(_callbacks.get(phase, [])) + + +def _trigger_callbacks_sync(phase: PhaseType, *args, **kwargs) -> List[Any]: + callbacks = get_callbacks(phase) + if not callbacks: + logger.debug(f"No callbacks registered for phase '{phase}'") + return [] + + results = [] + for callback in callbacks: + try: + result = callback(*args, **kwargs) + results.append(result) + logger.debug(f"Successfully executed async callback {callback.__name__}") + except Exception as e: + logger.error( + f"Async callback {callback.__name__} failed in phase '{phase}': {e}\n" + f"{traceback.format_exc()}" + ) + results.append(None) + + return results + + +async def _trigger_callbacks(phase: PhaseType, *args, **kwargs) -> List[Any]: + callbacks = get_callbacks(phase) + + if not callbacks: + 
logger.debug(f"No callbacks registered for phase '{phase}'") + return [] + + logger.debug(f"Triggering {len(callbacks)} async callbacks for phase '{phase}'") + + results = [] + for callback in callbacks: + try: + result = callback(*args, **kwargs) + if asyncio.iscoroutine(result): + result = await result + results.append(result) + logger.debug(f"Successfully executed async callback {callback.__name__}") + except Exception as e: + logger.error( + f"Async callback {callback.__name__} failed in phase '{phase}': {e}\n" + f"{traceback.format_exc()}" + ) + results.append(None) + + return results + + +async def on_startup() -> List[Any]: + return await _trigger_callbacks("startup") + + +async def on_shutdown() -> List[Any]: + return await _trigger_callbacks("shutdown") + + +async def on_invoke_agent(*args, **kwargs) -> List[Any]: + return await _trigger_callbacks("invoke_agent", *args, **kwargs) + + +async def on_agent_exception(exception: Exception, *args, **kwargs) -> List[Any]: + return await _trigger_callbacks("agent_exception", exception, *args, **kwargs) + + +async def on_version_check(*args, **kwargs) -> List[Any]: + return await _trigger_callbacks("version_check", *args, **kwargs) + + +def on_load_model_config(*args, **kwargs) -> List[Any]: + return _trigger_callbacks_sync("load_model_config", *args, **kwargs) + + +def on_edit_file(*args, **kwargs) -> Any: + return _trigger_callbacks_sync("edit_file", *args, **kwargs) + + +def on_delete_file(*args, **kwargs) -> Any: + return _trigger_callbacks_sync("delete_file", *args, **kwargs) + + +def on_run_shell_command(*args, **kwargs) -> Any: + return _trigger_callbacks_sync("run_shell_command", *args, **kwargs) + + +def on_agent_reload(*args, **kwargs) -> Any: + return _trigger_callbacks_sync("agent_reload", *args, **kwargs) + + +def on_load_prompt(): + return _trigger_callbacks_sync("load_prompt") + + +def on_custom_command_help() -> List[Any]: + """Collect custom command help entries from plugins. + + Each callback should return a list of tuples [(name, description), ...] + or a single tuple, or None. We'll flatten and sanitize results. + """ + return _trigger_callbacks_sync("custom_command_help") + + +def on_custom_command(command: str, name: str) -> List[Any]: + """Trigger custom command callbacks. + + This allows plugins to register handlers for slash commands + that are not built into the core command handler. + + Args: + command: The full command string (e.g., "/foo bar baz"). + name: The primary command name without the leading slash (e.g., "foo"). + + Returns: + Implementations may return: + - True if the command was handled (and no further action is needed) + - A string to be processed as user input by the caller + - None to indicate not handled + """ + return _trigger_callbacks_sync("custom_command", command, name) diff --git a/code_puppy/command_line/attachments.py b/code_puppy/command_line/attachments.py new file mode 100644 index 00000000..9ea5c3a1 --- /dev/null +++ b/code_puppy/command_line/attachments.py @@ -0,0 +1,363 @@ +"""Helpers for parsing file attachments from interactive prompts.""" + +from __future__ import annotations + +import mimetypes +import os +import shlex +from dataclasses import dataclass +from pathlib import Path +from typing import Iterable, List, Sequence + +from pydantic_ai import BinaryContent, DocumentUrl, ImageUrl + +SUPPORTED_INLINE_SCHEMES = {"http", "https"} + +# Allow common extensions people drag in the terminal. 
+DEFAULT_ACCEPTED_IMAGE_EXTENSIONS = { + ".png", + ".jpg", + ".jpeg", + ".gif", + ".bmp", + ".webp", + ".tiff", +} +DEFAULT_ACCEPTED_DOCUMENT_EXTENSIONS = set() + + +@dataclass +class PromptAttachment: + """Represents a binary attachment parsed from the input prompt.""" + + placeholder: str + content: BinaryContent + + +@dataclass +class PromptLinkAttachment: + """Represents a URL attachment supported by pydantic-ai.""" + + placeholder: str + url_part: ImageUrl | DocumentUrl + + +@dataclass +class ProcessedPrompt: + """Container for parsed input prompt and attachments.""" + + prompt: str + attachments: List[PromptAttachment] + link_attachments: List[PromptLinkAttachment] + warnings: List[str] + + +class AttachmentParsingError(RuntimeError): + """Raised when we fail to load a user-provided attachment.""" + + +def _is_probable_path(token: str) -> bool: + """Heuristically determine whether a token is a local filesystem path.""" + + if not token: + return False + if token.startswith("#"): + return False + # Windows drive letters or Unix absolute/relative paths + if token.startswith(("/", "~", "./", "../")): + return True + if len(token) >= 2 and token[1] == ":": + return True + # Things like `path/to/file.png` + return os.sep in token or "\"" in token + + +def _unescape_dragged_path(token: str) -> str: + """Convert backslash-escaped spaces used by drag-and-drop to literal spaces.""" + # Shell/terminal escaping typically produces '\ ' sequences + return token.replace(r"\ ", " ") + + +def _normalise_path(token: str) -> Path: + """Expand user shortcuts and resolve relative components without touching fs.""" + # First unescape any drag-and-drop backslash spaces before other expansions + unescaped = _unescape_dragged_path(token) + expanded = os.path.expanduser(unescaped) + try: + # This will not resolve against symlinks because we do not call resolve() + return Path(expanded).absolute() + except Exception as exc: + raise AttachmentParsingError(f"Invalid path '{token}': {exc}") from exc + + +def _determine_media_type(path: Path) -> str: + """Best-effort media type detection for images only.""" + + mime, _ = mimetypes.guess_type(path.name) + if mime: + return mime + if path.suffix.lower() in DEFAULT_ACCEPTED_IMAGE_EXTENSIONS: + return "image/png" + return "application/octet-stream" + + +def _load_binary(path: Path) -> bytes: + try: + return path.read_bytes() + except FileNotFoundError as exc: + raise AttachmentParsingError(f"Attachment not found: {path}") from exc + except PermissionError as exc: + raise AttachmentParsingError(f"Cannot read attachment (permission denied): {path}") from exc + except OSError as exc: + raise AttachmentParsingError(f"Failed to read attachment {path}: {exc}") from exc + + +def _tokenise(prompt: str) -> Iterable[str]: + """Split the prompt preserving quoted segments using shell-like semantics.""" + + if not prompt: + return [] + try: + # On Windows, avoid POSIX escaping so backslashes are preserved + posix_mode = os.name != "nt" + return shlex.split(prompt, posix=posix_mode) + except ValueError: + # Fallback naive split when shlex fails (e.g. 
unmatched quotes) + return prompt.split() + + +def _strip_attachment_token(token: str) -> str: + """Trim surrounding whitespace/punctuation terminals tack onto paths.""" + + return token.strip().strip(",;:()[]{}") + + +def _candidate_paths( + tokens: Sequence[str], + start: int, + max_span: int = 5, +) -> Iterable[tuple[str, int]]: + """Yield space-joined token slices to reconstruct paths with spaces.""" + + collected: list[str] = [] + for offset, raw in enumerate(tokens[start : start + max_span]): + collected.append(raw) + yield " ".join(collected), start + offset + 1 + + +def _is_supported_extension(path: Path) -> bool: + suffix = path.suffix.lower() + return suffix in DEFAULT_ACCEPTED_IMAGE_EXTENSIONS | DEFAULT_ACCEPTED_DOCUMENT_EXTENSIONS + + +def _parse_link(token: str) -> PromptLinkAttachment | None: + """URL parsing disabled: no URLs are treated as attachments.""" + return None + + +@dataclass +class _DetectedPath: + placeholder: str + path: Path | None + start_index: int + consumed_until: int + unsupported: bool = False + link: PromptLinkAttachment | None = None + + def has_path(self) -> bool: + return self.path is not None and not self.unsupported + + +def _detect_path_tokens(prompt: str) -> tuple[list[_DetectedPath], list[str]]: + # Preserve backslash-spaces from drag-and-drop before shlex tokenization + # Replace '\ ' with a marker that shlex won't split, then restore later + ESCAPE_MARKER = "\u0000ESCAPED_SPACE\u0000" + masked_prompt = prompt.replace(r"\ ", ESCAPE_MARKER) + tokens = list(_tokenise(masked_prompt)) + # Restore escaped spaces in individual tokens + tokens = [t.replace(ESCAPE_MARKER, " ") for t in tokens] + + detections: list[_DetectedPath] = [] + warnings: list[str] = [] + + index = 0 + while index < len(tokens): + token = tokens[index] + + link_attachment = _parse_link(token) + if link_attachment: + detections.append( + _DetectedPath( + placeholder=token, + path=None, + start_index=index, + consumed_until=index + 1, + link=link_attachment, + ) + ) + index += 1 + continue + + stripped_token = _strip_attachment_token(token) + if not _is_probable_path(stripped_token): + index += 1 + continue + + start_index = index + consumed_until = index + 1 + candidate_path_token = stripped_token + # For placeholder: try to reconstruct escaped representation; if none, use raw token + original_tokens_for_slice = list(_tokenise(masked_prompt))[index:consumed_until] + candidate_placeholder = "".join( + ot.replace(ESCAPE_MARKER, r"\ ") if ESCAPE_MARKER in ot else ot + for ot in original_tokens_for_slice + ) + # If placeholder seems identical to raw token, just use the raw token + if candidate_placeholder == token.replace(" ", r"\ "): + candidate_placeholder = token + + try: + path = _normalise_path(candidate_path_token) + except AttachmentParsingError as exc: + warnings.append(str(exc)) + index = consumed_until + continue + + if not path.exists() or not path.is_file(): + found_span = False + last_path = path + for joined, end_index in _candidate_paths(tokens, index): + stripped_joined = _strip_attachment_token(joined) + if not _is_probable_path(stripped_joined): + continue + candidate_path_token = stripped_joined + candidate_placeholder = joined + consumed_until = end_index + try: + last_path = _normalise_path(candidate_path_token) + except AttachmentParsingError as exc: + warnings.append(str(exc)) + found_span = False + break + if last_path.exists() and last_path.is_file(): + path = last_path + found_span = True + # We'll rebuild escaped placeholder after this block + break + if not 
found_span: + warnings.append(f"Attachment ignored (not a file): {path}") + index += 1 + continue + # Reconstruct escaped placeholder for multi-token paths + original_tokens_for_path = tokens[index:consumed_until] + escaped_placeholder = " ".join(original_tokens_for_path).replace(" ", r"\ ") + candidate_placeholder = escaped_placeholder + if not _is_supported_extension(path): + detections.append( + _DetectedPath( + placeholder=candidate_placeholder, + path=path, + start_index=start_index, + consumed_until=consumed_until, + unsupported=True, + ) + ) + index = consumed_until + continue + + # Reconstruct escaped placeholder for exact replacement later + # For unquoted spaces, keep the original literal token from the prompt + # so replacement matches precisely + escaped_placeholder = candidate_placeholder + + detections.append( + _DetectedPath( + placeholder=candidate_placeholder, + path=path, + start_index=start_index, + consumed_until=consumed_until, + ) + ) + index = consumed_until + + return detections, warnings + + +def parse_prompt_attachments(prompt: str) -> ProcessedPrompt: + """Extract attachments from the prompt returning cleaned text and metadata.""" + + attachments: List[PromptAttachment] = [] + + detections, detection_warnings = _detect_path_tokens(prompt) + warnings: List[str] = list(detection_warnings) + + link_attachments = [d.link for d in detections if d.link is not None] + + for detection in detections: + if detection.link is not None and detection.path is None: + continue + if detection.path is None: + continue + if detection.unsupported: + warnings.append( + f"Unsupported attachment type: {detection.path.suffix or detection.path.name}" + ) + continue + + try: + media_type = _determine_media_type(detection.path) + data = _load_binary(detection.path) + except AttachmentParsingError as exc: + warnings.append(str(exc)) + continue + attachments.append( + PromptAttachment( + placeholder=detection.placeholder, + content=BinaryContent(data=data, media_type=media_type), + ) + ) + + # Rebuild cleaned_prompt by skipping tokens consumed as file paths. + # This preserves original punctuation and spacing for non-attachment tokens. + ESCAPE_MARKER = "\u0000ESCAPED_SPACE\u0000" + masked = prompt.replace(r"\ ", ESCAPE_MARKER) + tokens = list(_tokenise(masked)) + + # Build exact token spans for file attachments (supported or unsupported) + # Skip spans for: supported files (path present and not unsupported) and links. + spans = [ + (d.start_index, d.consumed_until) + for d in detections + if (d.path is not None and not d.unsupported) or (d.link is not None and d.path is None) + ] + cleaned_parts: list[str] = [] + i = 0 + while i < len(tokens): + span = next((s for s in spans if s[0] <= i < s[1]), None) + if span is not None: + i = span[1] + continue + cleaned_parts.append(tokens[i].replace(ESCAPE_MARKER, " ")) + i += 1 + + cleaned_prompt = " ".join(cleaned_parts).strip() + cleaned_prompt = " ".join(cleaned_prompt.split()) + + if cleaned_prompt == "" and attachments: + cleaned_prompt = "Describe the attached files in detail." 
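+    # If the user only dragged in files without typing any text, the fallback
+    # instruction above gives the model something actionable instead of an empty prompt.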
+ + return ProcessedPrompt( + prompt=cleaned_prompt, + attachments=attachments, + link_attachments=link_attachments, + warnings=warnings, + ) + + +__all__ = [ + "ProcessedPrompt", + "PromptAttachment", + "PromptLinkAttachment", + "AttachmentParsingError", + "parse_prompt_attachments", +] \ No newline at end of file diff --git a/code_puppy/command_line/command_handler.py b/code_puppy/command_line/command_handler.py new file mode 100644 index 00000000..58e37a3e --- /dev/null +++ b/code_puppy/command_line/command_handler.py @@ -0,0 +1,839 @@ +import os +from datetime import datetime +from pathlib import Path + +from code_puppy.command_line.model_picker_completion import update_model_in_input +from code_puppy.command_line.motd import print_motd +from code_puppy.command_line.utils import make_directory_table +from code_puppy.config import ( + CONTEXTS_DIR, + finalize_autosave_session, + get_config_keys, +) +from code_puppy.session_storage import list_sessions, load_session, save_session +from code_puppy.tools.tools_content import tools_content + + +def get_commands_help(): + """Generate aligned commands help using Rich Text for safe markup.""" + from rich.text import Text + + # Ensure plugins are loaded so custom help can register + _ensure_plugins_loaded() + + # Collect core commands with their syntax parts and descriptions + # (cmd_syntax, description) + core_cmds = [ + ("/help, /h", "Show this help message"), + ("/cd ", "Change directory or show directories"), + ( + "/agent ", + "Switch to a different agent or show available agents", + ), + ("/exit, /quit", "Exit interactive mode"), + ("/generate-pr-description [@dir]", "Generate comprehensive PR description"), + ("/model, /m ", "Set active model"), + ("/reasoning ", "Set OpenAI reasoning effort for GPT-5 models"), + ("/pin_model ", "Pin a specific model to an agent"), + ("/mcp", "Manage MCP servers (list, start, stop, status, etc.)"), + ("/motd", "Show the latest message of the day (MOTD)"), + ("/show", "Show puppy config key-values"), + ( + "/compact", + "Summarize and compact current chat history (uses compaction_strategy config)", + ), + ("/dump_context ", "Save current message history to file"), + ("/load_context ", "Load message history from file"), + ( + "/set", + "Set puppy config (e.g., /set yolo_mode true, /set auto_save_session true)", + ), + ("/tools", "Show available tools and capabilities"), + ( + "/truncate ", + "Truncate history to N most recent messages (keeping system message)", + ), + ("/history [N]", "Show message history for current autosave session (optional N messages)"), + ("/", "Show unknown command warning"), + ] + + # Determine padding width for the left column + left_width = max(len(cmd) for cmd, _ in core_cmds) + 2 # add spacing + + lines: list[Text] = [] + lines.append(Text("Commands Help", style="bold magenta")) + + for cmd, desc in core_cmds: + left = Text(cmd.ljust(left_width), style="cyan") + right = Text(desc) + line = Text() + line.append_text(left) + line.append_text(right) + lines.append(line) + + # Add custom commands from plugins (if any) + try: + from code_puppy import callbacks + + custom_help_results = callbacks.on_custom_command_help() + custom_entries: list[tuple[str, str]] = [] + for res in custom_help_results: + if not res: + continue + if isinstance(res, tuple) and len(res) == 2: + custom_entries.append((str(res[0]), str(res[1]))) + elif isinstance(res, list): + for item in res: + if isinstance(item, tuple) and len(item) == 2: + custom_entries.append((str(item[0]), str(item[1]))) + if 
custom_entries: + lines.append(Text("", style="dim")) + lines.append(Text("Custom Commands", style="bold magenta")) + # Compute padding for custom commands as well + custom_left_width = max(len(name) for name, _ in custom_entries) + 3 + for name, desc in custom_entries: + left = Text(f"/{name}".ljust(custom_left_width), style="cyan") + right = Text(desc) + line = Text() + line.append_text(left) + line.append_text(right) + lines.append(line) + except Exception: + pass + + final_text = Text() + for i, line in enumerate(lines): + if i > 0: + final_text.append("\n") + final_text.append_text(line) + + return final_text + + +_PLUGINS_LOADED = False + + +def _ensure_plugins_loaded() -> None: + global _PLUGINS_LOADED + if _PLUGINS_LOADED: + return + try: + from code_puppy import plugins + + plugins.load_plugin_callbacks() + _PLUGINS_LOADED = True + except Exception as e: + # If plugins fail to load, continue gracefully but note it + try: + from code_puppy.messaging import emit_warning + + emit_warning(f"Plugin load error: {e}") + except Exception: + pass + _PLUGINS_LOADED = True + + +def handle_command(command: str): + from code_puppy.messaging import emit_error, emit_info, emit_success, emit_warning + _ensure_plugins_loaded() + + """ + Handle commands prefixed with '/'. + + Args: + command: The command string to handle + + Returns: + True if the command was handled, False if not, or a string to be processed as user input + """ + command = command.strip() + + if command.strip().startswith("/motd"): + print_motd(force=True) + return True + + if command.strip().startswith("/compact"): + # Functions have been moved to BaseAgent class + from code_puppy.agents.agent_manager import get_current_agent + from code_puppy.config import get_compaction_strategy, get_protected_token_count + from code_puppy.messaging import ( + emit_error, + emit_info, + emit_success, + emit_warning, + ) + + try: + agent = get_current_agent() + history = agent.get_message_history() + if not history: + emit_warning("No history to compact yet. Ask me something first!") + return True + + current_agent = get_current_agent() + before_tokens = sum( + current_agent.estimate_tokens_for_message(m) for m in history + ) + compaction_strategy = get_compaction_strategy() + protected_tokens = get_protected_token_count() + emit_info( + f"🤔 Compacting {len(history)} messages using {compaction_strategy} strategy... (~{before_tokens} tokens)" + ) + + current_agent = get_current_agent() + if compaction_strategy == "truncation": + compacted = current_agent.truncation(history, protected_tokens) + summarized_messages = [] # No summarization in truncation mode + else: + # Default to summarization + compacted, summarized_messages = current_agent.summarize_messages( + history, with_protection=True + ) + + if not compacted: + emit_error("Compaction failed. History unchanged.") + return True + + agent.set_message_history(compacted) + + current_agent = get_current_agent() + after_tokens = sum( + current_agent.estimate_tokens_for_message(m) for m in compacted + ) + reduction_pct = ( + ((before_tokens - after_tokens) / before_tokens * 100) + if before_tokens > 0 + else 0 + ) + + strategy_info = ( + f"using {compaction_strategy} strategy" + if compaction_strategy == "truncation" + else "via summarization" + ) + emit_success( + f"✨ Done! 
History: {len(history)} → {len(compacted)} messages {strategy_info}\n" + f"🏦 Tokens: {before_tokens:,} → {after_tokens:,} ({reduction_pct:.1f}% reduction)" + ) + return True + except Exception as e: + emit_error(f"/compact error: {e}") + return True + + if command.startswith("/cd"): + tokens = command.split() + if len(tokens) == 1: + try: + table = make_directory_table() + emit_info(table) + except Exception as e: + emit_error(f"Error listing directory: {e}") + return True + elif len(tokens) == 2: + dirname = tokens[1] + target = os.path.expanduser(dirname) + if not os.path.isabs(target): + target = os.path.join(os.getcwd(), target) + if os.path.isdir(target): + os.chdir(target) + emit_success(f"Changed directory to: {target}") + else: + emit_error(f"Not a directory: {dirname}") + return True + + if command.strip().startswith("/show"): + from code_puppy.agents import get_current_agent + from code_puppy.command_line.model_picker_completion import get_active_model + from code_puppy.config import ( + get_compaction_strategy, + get_compaction_threshold, + get_openai_reasoning_effort, + get_owner_name, + get_protected_token_count, + get_puppy_name, + get_yolo_mode, + ) + + puppy_name = get_puppy_name() + owner_name = get_owner_name() + model = get_active_model() + yolo_mode = get_yolo_mode() + protected_tokens = get_protected_token_count() + compaction_threshold = get_compaction_threshold() + compaction_strategy = get_compaction_strategy() + + # Get current agent info + current_agent = get_current_agent() + + status_msg = f"""[bold magenta]🐶 Puppy Status[/bold magenta] + +[bold]puppy_name:[/bold] [cyan]{puppy_name}[/cyan] +[bold]owner_name:[/bold] [cyan]{owner_name}[/cyan] +[bold]current_agent:[/bold] [magenta]{current_agent.display_name}[/magenta] +[bold]model:[/bold] [green]{model}[/green] +[bold]YOLO_MODE:[/bold] {"[red]ON[/red]" if yolo_mode else "[yellow]off[/yellow]"} +[bold]protected_tokens:[/bold] [cyan]{protected_tokens:,}[/cyan] recent tokens preserved +[bold]compaction_threshold:[/bold] [cyan]{compaction_threshold:.1%}[/cyan] context usage triggers compaction +[bold]compaction_strategy:[/bold] [cyan]{compaction_strategy}[/cyan] (summarization or truncation) +[bold]reasoning_effort:[/bold] [cyan]{get_openai_reasoning_effort()}[/cyan] + +""" + emit_info(status_msg) + return True + + if command.startswith("/reasoning"): + tokens = command.split() + if len(tokens) != 2: + emit_warning("Usage: /reasoning ") + return True + + effort = tokens[1] + try: + from code_puppy.config import set_openai_reasoning_effort + + set_openai_reasoning_effort(effort) + except ValueError as exc: + emit_error(str(exc)) + return True + + from code_puppy.config import get_openai_reasoning_effort + + normalized_effort = get_openai_reasoning_effort() + + from code_puppy.agents.agent_manager import get_current_agent + + agent = get_current_agent() + agent.reload_code_generation_agent() + emit_success( + f"Reasoning effort set to '{normalized_effort}' and active agent reloaded" + ) + return True + + if command.startswith("/session"): + # /session id -> show current autosave id + # /session new -> rotate autosave id + tokens = command.split() + from code_puppy.config import ( + AUTOSAVE_DIR, + get_current_autosave_id, + get_current_autosave_session_name, + rotate_autosave_id, + ) + if len(tokens) == 1 or tokens[1] == "id": + sid = get_current_autosave_id() + emit_info( + f"[bold magenta]Autosave Session[/bold magenta]: {sid}\n" + f"Files prefix: {Path(AUTOSAVE_DIR) / get_current_autosave_session_name()}" + ) + 
return True + if tokens[1] == "new": + new_sid = rotate_autosave_id() + emit_success(f"New autosave session id: {new_sid}") + return True + emit_warning("Usage: /session [id|new]") + return True + + if command.startswith("/set"): + # Syntax: /set KEY=VALUE or /set KEY VALUE + from code_puppy.config import set_config_value + + tokens = command.split(None, 2) + argstr = command[len("/set") :].strip() + key = None + value = None + if "=" in argstr: + key, value = argstr.split("=", 1) + key = key.strip() + value = value.strip() + elif len(tokens) >= 3: + key = tokens[1] + value = tokens[2] + elif len(tokens) == 2: + key = tokens[1] + value = "" + else: + config_keys = get_config_keys() + if "compaction_strategy" not in config_keys: + config_keys.append("compaction_strategy") + session_help = ( + "\n[yellow]Session Management[/yellow]" + "\n [cyan]auto_save_session[/cyan] Auto-save chat after every response (true/false)" + ) + emit_warning( + f"Usage: /set KEY=VALUE or /set KEY VALUE\nConfig keys: {', '.join(config_keys)}\n[dim]Note: compaction_strategy can be 'summarization' or 'truncation'[/dim]{session_help}" + ) + return True + if key: + set_config_value(key, value) + emit_success(f'🌶 Set {key} = "{value}" in puppy.cfg!') + else: + emit_error("You must supply a key.") + return True + + if command.startswith("/tools"): + # Display the tools_content.py file content with markdown formatting + from rich.markdown import Markdown + + markdown_content = Markdown(tools_content) + emit_info(markdown_content) + return True + + if command.startswith("/agent"): + # Handle agent switching + from code_puppy.agents import ( + get_agent_descriptions, + get_available_agents, + get_current_agent, + set_current_agent, + ) + + tokens = command.split() + + if len(tokens) == 1: + # Show current agent and available agents + current_agent = get_current_agent() + available_agents = get_available_agents() + descriptions = get_agent_descriptions() + + # Generate a group ID for all messages in this command + import uuid + + group_id = str(uuid.uuid4()) + + emit_info( + f"[bold green]Current Agent:[/bold green] {current_agent.display_name}", + message_group=group_id, + ) + emit_info( + f"[dim]{current_agent.description}[/dim]\n", message_group=group_id + ) + + emit_info( + "[bold magenta]Available Agents:[/bold magenta]", message_group=group_id + ) + for name, display_name in available_agents.items(): + description = descriptions.get(name, "No description") + current_marker = ( + " [green]← current[/green]" if name == current_agent.name else "" + ) + emit_info( + f" [cyan]{name:<12}[/cyan] {display_name}{current_marker}", + message_group=group_id, + ) + emit_info(f" [dim]{description}[/dim]", message_group=group_id) + + emit_info( + "\n[yellow]Usage:[/yellow] /agent ", message_group=group_id + ) + return True + + elif len(tokens) == 2: + agent_name = tokens[1].lower() + + # Generate a group ID for all messages in this command + import uuid + + group_id = str(uuid.uuid4()) + available_agents = get_available_agents() + + if agent_name not in available_agents: + emit_error(f"Agent '{agent_name}' not found", message_group=group_id) + emit_warning( + f"Available agents: {', '.join(available_agents.keys())}", + message_group=group_id, + ) + return True + + current_agent = get_current_agent() + if current_agent.name == agent_name: + emit_info( + f"Already using agent: {current_agent.display_name}", + message_group=group_id, + ) + return True + + new_session_id = finalize_autosave_session() + if not 
set_current_agent(agent_name): + emit_warning( + "Agent switch failed after autosave rotation. Your context was preserved.", + message_group=group_id, + ) + return True + + new_agent = get_current_agent() + new_agent.reload_code_generation_agent() + emit_success( + f"Switched to agent: {new_agent.display_name}", + message_group=group_id, + ) + emit_info(f"[dim]{new_agent.description}[/dim]", message_group=group_id) + emit_info( + f"[dim]Auto-save session rotated to: {new_session_id}[/dim]", + message_group=group_id, + ) + return True + else: + emit_warning("Usage: /agent [agent-name]") + return True + + if command.startswith("/model") or command.startswith("/m "): + # Try setting model and show confirmation + # Handle both /model and /m for backward compatibility + model_command = command + if command.startswith("/model"): + # Convert /model to /m for internal processing + model_command = command.replace("/model", "/m", 1) + + # If no model matched, show available models + from code_puppy.command_line.model_picker_completion import load_model_names + + new_input = update_model_in_input(model_command) + if new_input is not None: + from code_puppy.command_line.model_picker_completion import get_active_model + + model = get_active_model() + # Make sure this is called for the test + emit_success(f"Active model set and loaded: {model}") + return True + model_names = load_model_names() + emit_warning("Usage: /model or /m ") + emit_warning(f"Available models: {', '.join(model_names)}") + return True + + if command.startswith("/mcp"): + from code_puppy.command_line.mcp import MCPCommandHandler + + handler = MCPCommandHandler() + return handler.handle_mcp_command(command) + + # Built-in help + if command in ("/help", "/h"): + import uuid + + group_id = str(uuid.uuid4()) + help_text = get_commands_help() + emit_info(help_text, message_group_id=group_id) + return True + + if command.startswith("/pin_model"): + # Handle agent model pinning + import json + + from code_puppy.agents.json_agent import discover_json_agents + from code_puppy.command_line.model_picker_completion import load_model_names + + tokens = command.split() + + if len(tokens) != 3: + emit_warning("Usage: /pin_model ") + + # Show available models and agents + available_models = load_model_names() + json_agents = discover_json_agents() + + # Get built-in agents + from code_puppy.agents.agent_manager import get_agent_descriptions + + builtin_agents = get_agent_descriptions() + + emit_info("Available models:") + for model in available_models: + emit_info(f" [cyan]{model}[/cyan]") + + if builtin_agents: + emit_info("\nAvailable built-in agents:") + for agent_name, description in builtin_agents.items(): + emit_info(f" [cyan]{agent_name}[/cyan] - {description}") + + if json_agents: + emit_info("\nAvailable JSON agents:") + for agent_name, agent_path in json_agents.items(): + emit_info(f" [cyan]{agent_name}[/cyan] ({agent_path})") + return True + + agent_name = tokens[1].lower() + model_name = tokens[2] + + # Check if model exists + available_models = load_model_names() + if model_name not in available_models: + emit_error(f"Model '{model_name}' not found") + emit_warning(f"Available models: {', '.join(available_models)}") + return True + + # Check if this is a JSON agent or a built-in Python agent + json_agents = discover_json_agents() + + # Get list of available built-in agents + from code_puppy.agents.agent_manager import get_agent_descriptions + + builtin_agents = get_agent_descriptions() + + is_json_agent = agent_name in json_agents + 
is_builtin_agent = agent_name in builtin_agents + + if not is_json_agent and not is_builtin_agent: + emit_error(f"Agent '{agent_name}' not found") + + # Show available agents + if builtin_agents: + emit_info("Available built-in agents:") + for name, desc in builtin_agents.items(): + emit_info(f" [cyan]{name}[/cyan] - {desc}") + + if json_agents: + emit_info("\nAvailable JSON agents:") + for name, path in json_agents.items(): + emit_info(f" [cyan]{name}[/cyan] ({path})") + return True + + # Handle different agent types + try: + if is_json_agent: + # Handle JSON agent - modify the JSON file + agent_file_path = json_agents[agent_name] + + with open(agent_file_path, "r", encoding="utf-8") as f: + agent_config = json.load(f) + + # Set the model + agent_config["model"] = model_name + + # Save the updated configuration + with open(agent_file_path, "w", encoding="utf-8") as f: + json.dump(agent_config, f, indent=2, ensure_ascii=False) + + else: + # Handle built-in Python agent - store in config + from code_puppy.config import set_agent_pinned_model + + set_agent_pinned_model(agent_name, model_name) + + emit_success(f"Model '{model_name}' pinned to agent '{agent_name}'") + + # If this is the current agent, refresh it so the prompt updates immediately + from code_puppy.agents import get_current_agent + + current_agent = get_current_agent() + if current_agent.name == agent_name: + try: + if is_json_agent and hasattr(current_agent, "refresh_config"): + current_agent.refresh_config() + current_agent.reload_code_generation_agent() + emit_info( + f"Active agent reloaded with pinned model '{model_name}'" + ) + except Exception as reload_error: + emit_warning( + f"Pinned model applied but reload failed: {reload_error}" + ) + + return True + + except Exception as e: + emit_error(f"Failed to pin model to agent '{agent_name}': {e}") + return True + + if command.startswith("/generate-pr-description"): + # Parse directory argument (e.g., /generate-pr-description @some/dir) + tokens = command.split() + directory_context = "" + for t in tokens: + if t.startswith("@"): + directory_context = f" Please work in the directory: {t[1:]}" + break + + # Hard-coded prompt from user requirements + pr_prompt = f"""Generate a comprehensive PR description for my current branch changes. Follow these steps: + + 1 Discover the changes: Use git CLI to find the base branch (usually main/master/develop) and get the list of changed files, commits, and diffs. + 2 Analyze the code: Read and analyze all modified files to understand: + • What functionality was added/changed/removed + • The technical approach and implementation details + • Any architectural or design pattern changes + • Dependencies added/removed/updated + 3 Generate a structured PR description with these sections: + • Title: Concise, descriptive title (50 chars max) + • Summary: Brief overview of what this PR accomplishes + • Changes Made: Detailed bullet points of specific changes + • Technical Details: Implementation approach, design decisions, patterns used + • Files Modified: List of key files with brief description of changes + • Testing: What was tested and how (if applicable) + • Breaking Changes: Any breaking changes (if applicable) + • Additional Notes: Any other relevant information + 4 Create a markdown file: Generate a PR_DESCRIPTION.md file with proper GitHub markdown formatting that I can directly copy-paste into GitHub's PR + description field. Use proper markdown syntax with headers, bullet points, code blocks, and formatting. 
+ 5 Make it review-ready: Ensure the description helps reviewers understand the context, approach, and impact of the changes. +6. If you have Github MCP, or gh cli is installed and authenticated then find the PR for the branch we analyzed and update the PR description there and then delete the PR_DESCRIPTION.md file. (If you have a better name (title) for the PR, go ahead and update the title too.{directory_context}""" + + # Return the prompt to be processed by the main chat system + return pr_prompt + + if command.startswith("/dump_context"): + from code_puppy.agents.agent_manager import get_current_agent + + tokens = command.split() + if len(tokens) != 2: + emit_warning("Usage: /dump_context ") + return True + + session_name = tokens[1] + agent = get_current_agent() + history = agent.get_message_history() + + if not history: + emit_warning("No message history to dump!") + return True + + try: + metadata = save_session( + history=history, + session_name=session_name, + base_dir=Path(CONTEXTS_DIR), + timestamp=datetime.now().isoformat(), + token_estimator=agent.estimate_tokens_for_message, + ) + emit_success( + f"✅ Context saved: {metadata.message_count} messages ({metadata.total_tokens} tokens)\n" + f"📁 Files: {metadata.pickle_path}, {metadata.metadata_path}" + ) + return True + + except Exception as exc: + emit_error(f"Failed to dump context: {exc}") + return True + + if command.startswith("/load_context"): + from code_puppy.agents.agent_manager import get_current_agent + + tokens = command.split() + if len(tokens) != 2: + emit_warning("Usage: /load_context ") + return True + + session_name = tokens[1] + contexts_dir = Path(CONTEXTS_DIR) + session_path = contexts_dir / f"{session_name}.pkl" + + try: + history = load_session(session_name, contexts_dir) + except FileNotFoundError: + emit_error(f"Context file not found: {session_path}") + available = list_sessions(contexts_dir) + if available: + emit_info(f"Available contexts: {', '.join(available)}") + return True + except Exception as exc: + emit_error(f"Failed to load context: {exc}") + return True + + agent = get_current_agent() + agent.set_message_history(history) + total_tokens = sum(agent.estimate_tokens_for_message(m) for m in history) + + # Rotate autosave id to avoid overwriting any existing autosave + try: + from code_puppy.config import rotate_autosave_id + new_id = rotate_autosave_id() + autosave_info = f"\n[dim]Autosave session rotated to: {new_id}[/dim]" + except Exception: + autosave_info = "" + + emit_success( + f"✅ Context loaded: {len(history)} messages ({total_tokens} tokens)\n" + f"📁 From: {session_path}{autosave_info}" + ) + return True + + if command.startswith("/truncate"): + from code_puppy.agents.agent_manager import get_current_agent + + tokens = command.split() + if len(tokens) != 2: + emit_error( + "Usage: /truncate (where N is the number of messages to keep)" + ) + return True + + try: + n = int(tokens[1]) + if n < 1: + emit_error("N must be a positive integer") + return True + except ValueError: + emit_error("N must be a valid integer") + return True + + agent = get_current_agent() + history = agent.get_message_history() + if not history: + emit_warning("No history to truncate yet. Ask me something first!") + return True + + if len(history) <= n: + emit_info( + f"History already has {len(history)} messages, which is <= {n}. Nothing to truncate." 
+ ) + return True + + # Always keep the first message (system message) and then keep the N-1 most recent messages + truncated_history = ( + [history[0]] + history[-(n - 1) :] if n > 1 else [history[0]] + ) + + agent.set_message_history(truncated_history) + emit_success( + f"Truncated message history from {len(history)} to {len(truncated_history)} messages (keeping system message and {n - 1} most recent)" + ) + return True + + + + if command.startswith("/history"): + from code_puppy.command_line.history_command import handle_history_command + return handle_history_command(command) + + if command in ("/exit", "/quit"): + emit_success("Goodbye!") + # Signal to the main app that we want to exit + # The actual exit handling is done in main.py + return True + + # Try plugin-provided custom commands before unknown warning + if command.startswith("/"): + # Extract command name without leading slash and arguments intact + name = command[1:].split()[0] if len(command) > 1 else "" + try: + from code_puppy import callbacks + + results = callbacks.on_custom_command(command=command, name=name) + # Iterate through callback results; treat str as handled (no model run) + for res in results: + if res is True: + return True + if isinstance(res, str): + # Display returned text to the user and treat as handled + try: + emit_info(res) + except Exception: + pass + return True + except Exception as e: + # Log via emit_error but do not block default handling + emit_warning(f"Custom command hook error: {e}") + + if name: + emit_warning( + f"Unknown command: {command}\n[dim]Type /help for options.[/dim]" + ) + else: + # Show current model ONLY here + from code_puppy.command_line.model_picker_completion import get_active_model + + current_model = get_active_model() + emit_info( + f"[bold green]Current Model:[/bold green] [cyan]{current_model}[/cyan]" + ) + return True + + return False \ No newline at end of file diff --git a/code_puppy/command_line/file_path_completion.py b/code_puppy/command_line/file_path_completion.py new file mode 100644 index 00000000..79d0903f --- /dev/null +++ b/code_puppy/command_line/file_path_completion.py @@ -0,0 +1,73 @@ +import glob +import os +from typing import Iterable + +from prompt_toolkit.completion import Completer, Completion +from prompt_toolkit.document import Document + + +class FilePathCompleter(Completer): + """A simple file path completer that works with a trigger symbol.""" + + def __init__(self, symbol: str = "@"): + self.symbol = symbol + + def get_completions( + self, document: Document, complete_event + ) -> Iterable[Completion]: + text = document.text + cursor_position = document.cursor_position + text_before_cursor = text[:cursor_position] + if self.symbol not in text_before_cursor: + return + symbol_pos = text_before_cursor.rfind(self.symbol) + text_after_symbol = text_before_cursor[symbol_pos + len(self.symbol) :] + start_position = -(len(text_after_symbol)) + try: + pattern = text_after_symbol + "*" + if not pattern.strip("*") or pattern.strip("*").endswith("/"): + base_path = pattern.strip("*") + if not base_path: + base_path = "." 
+ if base_path.startswith("~"): + base_path = os.path.expanduser(base_path) + if os.path.isdir(base_path): + paths = [ + os.path.join(base_path, f) + for f in os.listdir(base_path) + if not f.startswith(".") or text_after_symbol.endswith(".") + ] + else: + paths = [] + else: + paths = glob.glob(pattern) + if not pattern.startswith(".") and not pattern.startswith("*/."): + paths = [ + p for p in paths if not os.path.basename(p).startswith(".") + ] + paths.sort() + for path in paths: + is_dir = os.path.isdir(path) + display = os.path.basename(path) + if os.path.isabs(path): + display_path = path + else: + if text_after_symbol.startswith("/"): + display_path = os.path.abspath(path) + elif text_after_symbol.startswith("~"): + home = os.path.expanduser("~") + if path.startswith(home): + display_path = "~" + path[len(home) :] + else: + display_path = path + else: + display_path = path + display_meta = "Directory" if is_dir else "File" + yield Completion( + display_path, + start_position=start_position, + display=display, + display_meta=display_meta, + ) + except (PermissionError, FileNotFoundError, OSError): + pass diff --git a/code_puppy/command_line/history_command.py b/code_puppy/command_line/history_command.py new file mode 100644 index 00000000..c6952c2e --- /dev/null +++ b/code_puppy/command_line/history_command.py @@ -0,0 +1,439 @@ +"""History command implementation with message formatting capabilities.""" +import json +import re +import uuid +from datetime import datetime +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple, Union + +from code_puppy.agents.agent_manager import get_current_agent +from code_puppy.config import ( + AUTOSAVE_DIR, + get_current_autosave_id, + get_current_autosave_session_name, + get_puppy_name, +) +from code_puppy.messaging import emit_error, emit_info, emit_warning +from code_puppy.session_storage import list_sessions + + +class MessageFormatter: + """Handles formatting and display of message history.""" + + def __init__(self, verbose: bool = False): + self.verbose = verbose + self.puppy_name = get_puppy_name() + + def format_message(self, message: Any, index: int) -> str: + """Format a single message for display.""" + try: + role, content = self._extract_message_info(message) + + # Clean up role name + if role not in ['USER', 'ASSISTANT', 'SYSTEM', 'TOOL']: + role = role[:10] # Truncate very long role names + + # Truncate long content for display (unless verbose) + if not self.verbose and len(content) > 100: + content = content[:97] + "..." 
+ + return f" [{index}] [cyan]{role}[/cyan]: {content}" + + except Exception as e: + return f" [{index}] [dim]Error parsing message: {str(e)[:30]}...[/dim]" + + def _extract_message_info(self, message: Any) -> Tuple[str, str]: + """Extract role and content from various message formats.""" + # Handle simple role/content format (legacy/compatibility) + if hasattr(message, 'role') and hasattr(message, 'content'): + role = str(message.role).upper() + content = str(message.content) + + # Handle Pydantic AI ModelRequest type + elif hasattr(message, 'parts') and hasattr(message, 'role'): + role = message.role.value if hasattr(message.role, 'value') else str(message.role) + content = self._format_parts_message(message) + + # Handle other Pydantic AI ModelMessage types + elif hasattr(message, 'parts'): + content = self._format_parts_message(message) + role = self._infer_role_from_class(message) + + # Handle dictionary format + elif isinstance(message, dict): + role = message.get('role', 'unknown').upper() + content = message.get('content', str(message)) + + # Fallback to string representation + else: + content = str(message) + if content and len(content) < 200: # If it's a short string, maybe it's just content + role = 'MESSAGE' + else: + role = type(message).__name__.upper() + + return role, content + + def _format_parts_message(self, message: Any) -> str: + """Format a message with parts (Pydantic AI format).""" + content_parts = [] + tool_call_count = 0 + has_thinking = False + thinking_parts = [] + tool_calls = [] + + for part in message.parts: + part_type = type(part).__name__ + + # UserPromptPart - show the content directly + if part_type == 'UserPromptPart' and hasattr(part, 'content'): + user_content = str(part.content).strip() + if user_content: + content_parts.append(user_content) + + # ThinkingPart - capture full thinking content and duration + elif part_type == 'ThinkingPart': + has_thinking = True + thinking_content = str(part.content) if hasattr(part, 'content') else "(no thinking content)" + + if self.verbose: + # In verbose mode, store full thinking content + thinking_parts.append(thinking_content) + else: + # In normal mode, just capture duration for summary + thinking_duration = self._extract_thinking_duration(part) + + # Store duration for later use + if thinking_duration is not None: + if not hasattr(message, '_thinking_duration'): + message._thinking_duration = 0 + message._thinking_duration = max(message._thinking_duration, thinking_duration) + + # ToolCallPart - capture detailed tool call information + elif 'ToolCall' in part_type: + tool_call_count += 1 + + if self.verbose: + # In verbose mode, capture detailed tool call information + tool_name = getattr(part, 'name', 'unknown_tool') + tool_args = getattr(part, 'args', {}) + + # Format tool call details + try: + args_str = json.dumps(tool_args, indent=2, default=str) + tool_calls.append(f" 📋 {tool_name}:\n{args_str}") + except (TypeError, ValueError): + tool_calls.append(f" 📋 {tool_name}: {tool_args}") + + # TextPart - show the content + elif part_type == 'TextPart' and hasattr(part, 'content'): + text_content = str(part.content).strip() + if text_content: + content_parts.append(text_content) + + # Fallback for other parts + elif hasattr(part, 'content'): + part_content = str(part.content).strip() + if part_content: + content_parts.append(part_content) + else: + content_parts.append(str(part)) + + return self._build_final_content( + content_parts, has_thinking, thinking_parts, tool_call_count, tool_calls, message + ) + + def 
_extract_thinking_duration(self, part: Any) -> Optional[float]: + """Extract thinking duration from a ThinkingPart.""" + thinking_duration = None + + # Check if the ThinkingPart has duration info + if hasattr(part, 'duration'): + thinking_duration = part.duration + elif hasattr(part, 'content'): + content_str = str(part.content) + # Look for duration in content like "thought for 2.3s" or similar patterns + duration_match = re.search( + r'(?:(?:thought|thinking|for|took)\s+[^(]*?)?(\d+(?:\.\d+)?)\s*(?:s|sec|seconds?|ms|milliseconds?)', + content_str, re.IGNORECASE + ) + if duration_match: + duration_val = float(duration_match.group(1)) + duration_unit = duration_match.group(0).lower() + if 'ms' in duration_unit: + thinking_duration = duration_val / 1000 # Convert ms to seconds + else: + thinking_duration = duration_val + + return thinking_duration + + def _build_final_content( + self, + content_parts: List[str], + has_thinking: bool, + thinking_parts: List[str], + tool_call_count: int, + tool_calls: List[str], + message: Any + ) -> str: + """Build the final content string based on collected parts.""" + if self.verbose: + # Verbose mode: show full details + if thinking_parts: + content_parts.append(f"\n 🧠 {self.puppy_name} thinking:") + for i, thinking in enumerate(thinking_parts, 1): + # Indent each line of thinking content + for line in thinking.split('\n'): + content_parts.append(f" {line}") + + if tool_calls: + content_parts.append(f"\n 🔧 {self.puppy_name} tool calls:") + for tool_call in tool_calls: + # Add proper indentation for each tool call line + for line in tool_call.split('\n'): + content_parts.append(f" {line}") + else: + # Normal mode: show summaries + if has_thinking: + # Check if we have thinking duration info + if hasattr(message, '_thinking_duration') and message._thinking_duration > 0: + duration = message._thinking_duration + if duration < 1: + # Show in milliseconds if less than 1 second + duration_ms = int(duration * 1000) + content_parts.append(f"{self.puppy_name} thought ({duration_ms}ms)") + else: + # Show in seconds with appropriate precision + if duration < 10: + content_parts.append(f"{self.puppy_name} thought ({duration:.2f}s)") + else: + content_parts.append(f"{self.puppy_name} thought ({duration:.1f}s)") + else: + content_parts.append(f"{self.puppy_name} thought") + + if tool_call_count > 0: + content_parts.append(f"{self.puppy_name} made {tool_call_count} tool call{'s' if tool_call_count != 1 else ''}") + + return " | ".join(content_parts) if content_parts else "Empty message" + + def _infer_role_from_class(self, message: Any) -> str: + """Infer role from message class name.""" + class_name = message.__class__.__name__ + if 'request' in class_name.lower(): + return 'USER' + elif 'response' in class_name.lower() or 'assistant' in class_name.lower(): + return 'ASSISTANT' + elif 'system' in class_name.lower(): + return 'SYSTEM' + else: + return class_name.replace('Message', '').replace('Model', '').upper() + + +class HistoryCommand: + """Handles the /history command with all its parsing and display logic.""" + + def __init__(self): + self.formatter: Optional[MessageFormatter] = None + + def parse_and_execute(self, command: str) -> bool: + """Parse command arguments and execute the history display. 
+ + Args: + command: The full /history command string + + Returns: + True if command was handled successfully + """ + try: + line_count, verbose = self._parse_command_args(command) + if line_count is None: # Error case from _parse_command_args + return True + self.formatter = MessageFormatter(verbose=verbose) + + group_id = str(uuid.uuid4()) + self._show_current_session_info(group_id) + self._show_message_history(line_count, group_id) + self._show_other_sessions(group_id) + + return True + + except Exception as e: + emit_error(f"Failed to execute history command: {e}") + return True + + def _parse_command_args(self, command: str) -> Tuple[Optional[int], bool]: + """Parse command arguments for line count and verbose flag.""" + tokens = command.split() + line_count = 10 # default + verbose = False + + # Handle different argument patterns + if len(tokens) == 2: + arg = tokens[1] + if arg in ['-v', '--verbose']: + verbose = True + else: + try: + line_count = int(arg) + if line_count <= 0: + emit_error("Line count must be a positive integer") + return None, verbose + except ValueError: + emit_error(f"Invalid line count: {arg}. Must be a positive integer.") + return None, verbose + elif len(tokens) == 3: + # Handle combinations like "/history 5 -v" or "/history -v 5" + if '-v' in tokens or '--verbose' in tokens: + verbose = True + # Find the numeric argument + for token in tokens[1:]: + if token not in ['-v', '--verbose']: + try: + line_count = int(token) + if line_count <= 0: + emit_error("Line count must be a positive integer") + return None, verbose + except ValueError: + emit_error(f"Invalid line count: {token}. Must be a positive integer.") + return None, verbose + break + else: + emit_error("Usage: /history [N] [-v|--verbose] - shows N messages, verbose shows full content") + return None, verbose + elif len(tokens) > 3: + emit_error("Usage: /history [N] [-v|--verbose] - shows N messages, verbose shows full content") + return None, verbose + + return line_count, verbose + + def _show_current_session_info(self, group_id: str) -> None: + """Show information about the current autosave session.""" + current_session_name = get_current_autosave_session_name() + emit_info( + f"[bold magenta]Current Autosave Session:[/bold magenta] {current_session_name}", + message_group=group_id, + ) + + def _show_message_history(self, line_count: int, group_id: str) -> None: + """Show the actual message history.""" + try: + agent = get_current_agent() + history = agent.get_message_history() + + if not history or (isinstance(history, list) and len(history) == 0): + emit_warning( + "No message history in current session. 
Ask me something first!", + message_group=group_id, + ) + return + + total_tokens = sum(agent.estimate_tokens_for_message(m) for m in history) + emit_info( + f"[bold]Messages:[/bold] {len(history)} total ({total_tokens:,} tokens)", + message_group=group_id, + ) + + # Show recent messages (last N messages, or all if N >= total) + if len(history) > line_count: + recent_messages = history[-line_count:] + else: + recent_messages = history + + mode_desc = " (verbose)" if self.formatter.verbose else "" + emit_info( + f"[bold]Recent Messages (last {len(recent_messages)}):{mode_desc}[/bold]", + message_group=group_id, + ) + + # Display each message + for i, message in enumerate(recent_messages, start=len(history) - len(recent_messages) + 1): + formatted_message = self.formatter.format_message(message, i) + emit_info(formatted_message, message_group=group_id) + + if len(history) > line_count: + emit_info( + f" [dim]... and {len(history) - line_count} earlier messages[/dim]", + message_group=group_id, + ) + + except Exception as e: + emit_error(f"Failed to get current message history: {e}", message_group=group_id) + + def _show_other_sessions(self, group_id: str) -> None: + """Show information about other available autosave sessions.""" + try: + autosave_dir = Path(AUTOSAVE_DIR) + all_sessions = list_sessions(autosave_dir) + + current_session_name = get_current_autosave_session_name() + # Filter out the current session + other_sessions = [s for s in all_sessions if s != current_session_name] + + if other_sessions: + emit_info( + "\n[bold magenta]Other Autosave Sessions Available:[/bold magenta]", + message_group=group_id, + ) + + # Load metadata for each session to show more info + for session in other_sessions[:5]: # Limit to 5 to avoid spam + meta_path = autosave_dir / f"{session}_meta.json" + try: + with meta_path.open("r", encoding="utf-8") as f: + metadata = json.load(f) + timestamp = metadata.get("timestamp", "unknown") + message_count = metadata.get("message_count", 0) + total_tokens = metadata.get("total_tokens", 0) + + # Format timestamp nicely + if timestamp != "unknown": + try: + dt = datetime.fromisoformat(timestamp) + timestamp = dt.strftime("%Y-%m-%d %H:%M:%S") + except Exception: + pass + + emit_info( + f" [cyan]{session}[/cyan] - {message_count} messages ({total_tokens:,} tokens) - {timestamp}", + message_group=group_id, + ) + except Exception: + emit_info( + f" [cyan]{session}[/cyan] - [dim]metadata unavailable[/dim]", + message_group=group_id, + ) + + if len(other_sessions) > 5: + emit_info( + f" [dim]... and {len(other_sessions) - 5} more sessions[/dim]", + message_group=group_id, + ) + + emit_info( + "\n[dim]Tip: Use /load_context to load a different session[/dim]", + message_group=group_id, + ) + else: + emit_info( + "\n[dim]No other autosave sessions available[/dim]", + message_group=group_id, + ) + + except Exception as e: + emit_warning(f"Failed to list other sessions: {e}", message_group=group_id) + + +# Global instance for easy access +_history_command = HistoryCommand() + + +def handle_history_command(command: str) -> bool: + """Convenient function to handle /history commands. 
+ + Args: + command: The full /history command string + + Returns: + True if command was handled successfully + """ + return _history_command.parse_and_execute(command) \ No newline at end of file diff --git a/code_puppy/command_line/load_context_completion.py b/code_puppy/command_line/load_context_completion.py new file mode 100644 index 00000000..f11a6ca7 --- /dev/null +++ b/code_puppy/command_line/load_context_completion.py @@ -0,0 +1,59 @@ +from pathlib import Path + +from prompt_toolkit.completion import Completer, Completion + +from code_puppy.config import CONFIG_DIR + + +class LoadContextCompleter(Completer): + def __init__(self, trigger: str = "/load_context"): + self.trigger = trigger + + def get_completions(self, document, complete_event): + text_before_cursor = document.text_before_cursor + stripped_text_for_trigger_check = text_before_cursor.lstrip() + + if not stripped_text_for_trigger_check.startswith(self.trigger): + return + + # Determine the part of the text that is relevant for this completer + actual_trigger_pos = text_before_cursor.find(self.trigger) + effective_input = text_before_cursor[actual_trigger_pos:] + + tokens = effective_input.split() + + # Case 1: Input is exactly the trigger (e.g., "/load_context") and nothing more + if ( + len(tokens) == 1 + and tokens[0] == self.trigger + and not effective_input.endswith(" ") + ): + yield Completion( + text=self.trigger + " ", + start_position=-len(tokens[0]), + display=self.trigger + " ", + display_meta="load saved context", + ) + return + + # Case 2: Input is trigger + space or trigger + partial session name + session_filter = "" + if len(tokens) > 1: # e.g., ["/load_context", "partial"] + session_filter = tokens[1] + + # Get available context files + try: + contexts_dir = Path(CONFIG_DIR) / "contexts" + if contexts_dir.exists(): + for pkl_file in contexts_dir.glob("*.pkl"): + session_name = pkl_file.stem # removes .pkl extension + if session_name.startswith(session_filter): + yield Completion( + session_name, + start_position=-len(session_filter), + display=session_name, + display_meta="saved context session", + ) + except Exception: + # Silently ignore errors (e.g., permission issues, non-existent dir) + pass diff --git a/code_puppy/command_line/mcp/__init__.py b/code_puppy/command_line/mcp/__init__.py new file mode 100644 index 00000000..a6198836 --- /dev/null +++ b/code_puppy/command_line/mcp/__init__.py @@ -0,0 +1,10 @@ +""" +MCP Command Line Interface - Namespace package for MCP server management commands. + +This package provides a modular command interface for managing MCP servers. +Each command is implemented in its own module for better maintainability. +""" + +from .handler import MCPCommandHandler + +__all__ = ["MCPCommandHandler"] diff --git a/code_puppy/command_line/mcp/add_command.py b/code_puppy/command_line/mcp/add_command.py new file mode 100644 index 00000000..0ce09831 --- /dev/null +++ b/code_puppy/command_line/mcp/add_command.py @@ -0,0 +1,183 @@ +""" +MCP Add Command - Adds new MCP servers from JSON configuration or wizard. +""" + +import json +import logging +import os +from typing import List, Optional + +from code_puppy.messaging import emit_info +from code_puppy.tui_state import is_tui_mode + +from .base import MCPCommandBase +from .wizard_utils import run_interactive_install_wizard + +# Configure logging +logger = logging.getLogger(__name__) + + +class AddCommand(MCPCommandBase): + """ + Command handler for adding MCP servers. 
+ + Adds new MCP servers from JSON configuration or interactive wizard. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Add a new MCP server from JSON configuration or launch wizard. + + Usage: + /mcp add - Launch interactive wizard + /mcp add - Add server from JSON config + + Example JSON: + /mcp add {"name": "test", "type": "stdio", "command": "echo", "args": ["hello"]} + + Args: + args: Command arguments - JSON config or empty for wizard + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + # Check if in TUI mode and guide user to use Ctrl+T instead + if is_tui_mode() and not args: + emit_info( + "💡 In TUI mode, press Ctrl+T to open the MCP Install Wizard", + message_group=group_id, + ) + emit_info( + " The wizard provides a better interface for browsing and installing MCP servers.", + message_group=group_id, + ) + return + + try: + if args: + # Parse JSON from arguments + json_str = " ".join(args) + + try: + config_dict = json.loads(json_str) + except json.JSONDecodeError as e: + emit_info(f"Invalid JSON: {e}", message_group=group_id) + emit_info( + "Usage: /mcp add or /mcp add (for wizard)", + message_group=group_id, + ) + emit_info( + 'Example: /mcp add {"name": "test", "type": "stdio", "command": "echo"}', + message_group=group_id, + ) + return + + # Validate required fields + if "name" not in config_dict: + emit_info("Missing required field: 'name'", message_group=group_id) + return + if "type" not in config_dict: + emit_info("Missing required field: 'type'", message_group=group_id) + return + + # Add the server + success = self._add_server_from_json(config_dict, group_id) + + if success: + # Reload MCP servers + try: + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + except ImportError: + pass + + emit_info( + "Use '/mcp list' to see all servers", message_group=group_id + ) + + else: + # No arguments - launch interactive wizard with server templates + success = run_interactive_install_wizard(self.manager, group_id) + + if success: + # Reload the agent to pick up new server + try: + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + except ImportError: + pass + + except ImportError as e: + logger.error(f"Failed to import: {e}") + emit_info("Required module not available", message_group=group_id) + except Exception as e: + logger.error(f"Error in add command: {e}") + emit_info(f"[red]Error adding server: {e}[/red]", message_group=group_id) + + def _add_server_from_json(self, config_dict: dict, group_id: str) -> bool: + """ + Add a server from JSON configuration. 
+ + Args: + config_dict: Server configuration dictionary + group_id: Message group ID + + Returns: + True if successful, False otherwise + """ + try: + from code_puppy.config import MCP_SERVERS_FILE + from code_puppy.mcp_.managed_server import ServerConfig + + # Extract required fields + name = config_dict.pop("name") + server_type = config_dict.pop("type") + enabled = config_dict.pop("enabled", True) + + # Everything else goes into config + server_config = ServerConfig( + id=f"{name}_{hash(name)}", + name=name, + type=server_type, + enabled=enabled, + config=config_dict, # Remaining fields are server-specific config + ) + + # Register the server + server_id = self.manager.register_server(server_config) + + if not server_id: + emit_info(f"Failed to add server '{name}'", message_group=group_id) + return False + + emit_info( + f"✅ Added server '{name}' (ID: {server_id})", message_group=group_id + ) + + # Save to mcp_servers.json for persistence + if os.path.exists(MCP_SERVERS_FILE): + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + else: + servers = {} + data = {"mcp_servers": servers} + + # Add new server + servers[name] = config_dict.copy() + servers[name]["type"] = server_type + + # Save back + os.makedirs(os.path.dirname(MCP_SERVERS_FILE), exist_ok=True) + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + + return True + + except Exception as e: + logger.error(f"Error adding server from JSON: {e}") + emit_info(f"[red]Failed to add server: {e}[/red]", message_group=group_id) + return False diff --git a/code_puppy/command_line/mcp/base.py b/code_puppy/command_line/mcp/base.py new file mode 100644 index 00000000..7e195c59 --- /dev/null +++ b/code_puppy/command_line/mcp/base.py @@ -0,0 +1,35 @@ +""" +MCP Command Base Classes - Shared functionality for MCP command handlers. + +Provides base classes and common utilities used across all MCP command modules. +""" + +import logging + +from rich.console import Console + +from code_puppy.mcp_.manager import get_mcp_manager + +# Configure logging +logger = logging.getLogger(__name__) + + +class MCPCommandBase: + """ + Base class for MCP command handlers. + + Provides common functionality like console access and MCP manager access + that all command handlers need. + """ + + def __init__(self): + """Initialize the base command handler.""" + self.console = Console() + self.manager = get_mcp_manager() + logger.debug(f"Initialized {self.__class__.__name__}") + + def generate_group_id(self) -> str: + """Generate a unique group ID for message grouping.""" + import uuid + + return str(uuid.uuid4()) diff --git a/code_puppy/command_line/mcp/handler.py b/code_puppy/command_line/mcp/handler.py new file mode 100644 index 00000000..dc10858e --- /dev/null +++ b/code_puppy/command_line/mcp/handler.py @@ -0,0 +1,133 @@ +""" +MCP Command Handler - Main router for MCP server management commands. + +This module provides the MCPCommandHandler class that routes MCP commands +to their respective command modules. 
+""" + +import logging +import shlex + +from code_puppy.messaging import emit_info + +from .add_command import AddCommand +from .base import MCPCommandBase +from .help_command import HelpCommand +from .install_command import InstallCommand + +# Import all command modules +from .list_command import ListCommand +from .logs_command import LogsCommand +from .remove_command import RemoveCommand +from .restart_command import RestartCommand +from .search_command import SearchCommand +from .start_all_command import StartAllCommand +from .start_command import StartCommand +from .status_command import StatusCommand +from .stop_all_command import StopAllCommand +from .stop_command import StopCommand +from .test_command import TestCommand + +# Configure logging +logger = logging.getLogger(__name__) + + +class MCPCommandHandler(MCPCommandBase): + """ + Main command handler for MCP server management operations. + + Routes MCP commands to their respective command modules. + Each command is implemented in its own module for better maintainability. + + Example usage: + handler = MCPCommandHandler() + handler.handle_mcp_command("/mcp list") + handler.handle_mcp_command("/mcp start filesystem") + handler.handle_mcp_command("/mcp status filesystem") + """ + + def __init__(self): + """Initialize the MCP command handler.""" + super().__init__() + + # Initialize command handlers + self._commands = { + "list": ListCommand(), + "start": StartCommand(), + "start-all": StartAllCommand(), + "stop": StopCommand(), + "stop-all": StopAllCommand(), + "restart": RestartCommand(), + "status": StatusCommand(), + "test": TestCommand(), + "add": AddCommand(), + "remove": RemoveCommand(), + "logs": LogsCommand(), + "search": SearchCommand(), + "install": InstallCommand(), + "help": HelpCommand(), + } + + logger.info("MCPCommandHandler initialized with all command modules") + + def handle_mcp_command(self, command: str) -> bool: + """ + Handle MCP commands and route to appropriate handler. 
+ + Args: + command: The full command string (e.g., "/mcp list", "/mcp start server") + + Returns: + True if command was handled successfully, False otherwise + """ + group_id = self.generate_group_id() + + try: + # Remove /mcp prefix and parse arguments + command = command.strip() + if not command.startswith("/mcp"): + return False + + # Remove the /mcp prefix + args_str = command[4:].strip() + + # If no subcommand, show status dashboard + if not args_str: + self._commands["list"].execute([], group_id=group_id) + return True + + # Parse arguments using shlex for proper handling of quoted strings + try: + args = shlex.split(args_str) + except ValueError as e: + emit_info( + f"[red]Invalid command syntax: {e}[/red]", message_group=group_id + ) + return True + + if not args: + self._commands["list"].execute([], group_id=group_id) + return True + + subcommand = args[0].lower() + sub_args = args[1:] if len(args) > 1 else [] + + # Route to appropriate command handler + command_handler = self._commands.get(subcommand) + if command_handler: + command_handler.execute(sub_args, group_id=group_id) + return True + else: + emit_info( + f"[yellow]Unknown MCP subcommand: {subcommand}[/yellow]", + message_group=group_id, + ) + emit_info( + "Type '/mcp help' for available commands", message_group=group_id + ) + return True + + except Exception as e: + logger.error(f"Error handling MCP command '{command}': {e}") + emit_info(f"Error executing MCP command: {e}", message_group=group_id) + return True diff --git a/code_puppy/command_line/mcp/help_command.py b/code_puppy/command_line/mcp/help_command.py new file mode 100644 index 00000000..10364c51 --- /dev/null +++ b/code_puppy/command_line/mcp/help_command.py @@ -0,0 +1,146 @@ +""" +MCP Help Command - Shows help for all MCP commands. +""" + +import logging +from typing import List, Optional + +from rich.text import Text + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase + +# Configure logging +logger = logging.getLogger(__name__) + + +class HelpCommand(MCPCommandBase): + """ + Command handler for showing MCP command help. + + Displays comprehensive help information for all available MCP commands. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Show help for MCP commands. 
+ + Args: + args: Command arguments (unused) + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + # Build help text programmatically to avoid markup conflicts + help_lines = [] + + # Title + help_lines.append( + Text("MCP Server Management Commands", style="bold magenta") + ) + help_lines.append(Text("")) + + # Registry Commands + help_lines.append(Text("Registry Commands:", style="bold cyan")) + help_lines.append( + Text("/mcp search", style="cyan") + + Text(" [query] Search 30+ pre-configured servers") + ) + help_lines.append( + Text("/mcp install", style="cyan") + + Text(" Install server from registry") + ) + help_lines.append(Text("")) + + # Core Commands + help_lines.append(Text("Core Commands:", style="bold cyan")) + help_lines.append( + Text("/mcp", style="cyan") + + Text(" Show server status dashboard") + ) + help_lines.append( + Text("/mcp list", style="cyan") + + Text(" List all registered servers") + ) + help_lines.append( + Text("/mcp start", style="cyan") + + Text(" Start a specific server") + ) + help_lines.append( + Text("/mcp start-all", style="cyan") + + Text(" Start all servers") + ) + help_lines.append( + Text("/mcp stop", style="cyan") + + Text(" Stop a specific server") + ) + help_lines.append( + Text("/mcp stop-all", style="cyan") + + Text(" [group_id] Stop all running servers") + ) + help_lines.append( + Text("/mcp restart", style="cyan") + + Text(" Restart a specific server") + ) + help_lines.append(Text("")) + + # Management Commands + help_lines.append(Text("Management Commands:", style="bold cyan")) + help_lines.append( + Text("/mcp status", style="cyan") + + Text(" [name] Show detailed status (all servers or specific)") + ) + help_lines.append( + Text("/mcp test", style="cyan") + + Text(" Test connectivity to a server") + ) + help_lines.append( + Text("/mcp logs", style="cyan") + + Text(" [limit] Show recent events (default limit: 10)") + ) + help_lines.append( + Text("/mcp add", style="cyan") + + Text(" [json] Add new server (JSON or wizard)") + ) + help_lines.append( + Text("/mcp remove", style="cyan") + + Text(" Remove/disable a server") + ) + help_lines.append( + Text("/mcp help", style="cyan") + + Text(" Show this help message") + ) + help_lines.append(Text("")) + + # Status Indicators + help_lines.append(Text("Status Indicators:", style="bold")) + help_lines.append( + Text("✓ Running ✗ Stopped ⚠ Error ⏸ Quarantined ⭐ Popular") + ) + help_lines.append(Text("")) + + # Examples + help_lines.append(Text("Examples:", style="bold")) + examples_text = """/mcp search database # Find database servers +/mcp install postgres # Install PostgreSQL server +/mcp start filesystem # Start a specific server +/mcp start-all # Start all servers at once +/mcp stop-all # Stop all running servers +/mcp add {"name": "test", "type": "stdio", "command": "echo"}""" + help_lines.append(Text(examples_text, style="dim")) + + # Combine all lines + final_text = Text() + for i, line in enumerate(help_lines): + if i > 0: + final_text.append("\n") + final_text.append_text(line) + + emit_info(final_text, message_group=group_id) + + except Exception as e: + logger.error(f"Error showing help: {e}") + emit_info(f"[red]Error showing help: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/install_command.py b/code_puppy/command_line/mcp/install_command.py new file mode 100644 index 00000000..7db29911 --- /dev/null +++ b/code_puppy/command_line/mcp/install_command.py @@ -0,0 
+1,225 @@ +""" +MCP Install Command - Installs pre-configured MCP servers from the registry. +""" + +import logging +from typing import List, Optional + +from code_puppy.messaging import emit_info +from code_puppy.tui_state import is_tui_mode + +from .base import MCPCommandBase +from .wizard_utils import run_interactive_install_wizard + +# Configure logging +logger = logging.getLogger(__name__) + + +class InstallCommand(MCPCommandBase): + """ + Command handler for installing MCP servers from registry. + + Installs pre-configured MCP servers with optional interactive wizard. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Install a pre-configured MCP server from the registry. + + Args: + args: Server ID and optional custom name + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + # If in TUI mode, show message to use Ctrl+T + if is_tui_mode(): + emit_info( + "In TUI mode, use Ctrl+T to open the MCP Install Wizard", + message_group=group_id, + ) + return + + # In interactive mode, use the comprehensive installer + if not args: + # No args - launch interactive wizard + success = run_interactive_install_wizard(self.manager, group_id) + if success: + try: + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + except ImportError: + pass + return + + # Has args - install directly from catalog + server_id = args[0] + success = self._install_from_catalog(server_id, group_id) + if success: + try: + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + except ImportError: + pass + return + + except ImportError: + emit_info("Server registry not available", message_group=group_id) + except Exception as e: + logger.error(f"Error installing server: {e}") + emit_info(f"Installation failed: {e}", message_group=group_id) + + def _install_from_catalog(self, server_name_or_id: str, group_id: str) -> bool: + """Install a server directly from the catalog by name or ID.""" + try: + from code_puppy.mcp_.server_registry_catalog import catalog + from code_puppy.messaging import emit_prompt + + from .utils import find_server_id_by_name + from .wizard_utils import install_server_from_catalog + + # Try to find server by ID first, then by name/search + selected_server = catalog.get_by_id(server_name_or_id) + + if not selected_server: + # Try searching by name + results = catalog.search(server_name_or_id) + if not results: + emit_info( + f"❌ No server found matching '{server_name_or_id}'", + message_group=group_id, + ) + emit_info( + "Try '/mcp install' to browse available servers", + message_group=group_id, + ) + return False + elif len(results) == 1: + selected_server = results[0] + else: + # Multiple matches, show them + emit_info( + f"🔍 Multiple servers found matching '{server_name_or_id}':", + message_group=group_id, + ) + for i, server in enumerate(results[:5]): + indicators = [] + if server.verified: + indicators.append("✓") + if server.popular: + indicators.append("⭐") + + indicator_str = "" + if indicators: + indicator_str = " " + "".join(indicators) + + emit_info( + f" {i + 1}. 
{server.display_name}{indicator_str}", + message_group=group_id, + ) + emit_info(f" ID: {server.id}", message_group=group_id) + + emit_info( + "Please use the exact server ID: '/mcp install '", + message_group=group_id, + ) + return False + + # Show what we're installing + emit_info( + f"📦 Installing: {selected_server.display_name}", message_group=group_id + ) + description = ( + selected_server.description + if selected_server.description + else "No description available" + ) + emit_info(f"Description: {description}", message_group=group_id) + emit_info("", message_group=group_id) + + # Get custom name (default to server name) + server_name = emit_prompt( + f"Enter custom name for this server [{selected_server.name}]: " + ).strip() + if not server_name: + server_name = selected_server.name + + # Check if name already exists + existing_server = find_server_id_by_name(self.manager, server_name) + if existing_server: + override = emit_prompt( + f"Server '{server_name}' already exists. Override it? [y/N]: " + ) + if not override.lower().startswith("y"): + emit_info("Installation cancelled", message_group=group_id) + return False + + # Collect environment variables and command line arguments + env_vars = {} + cmd_args = {} + + # Get environment variables + required_env_vars = selected_server.get_environment_vars() + if required_env_vars: + emit_info( + "\n[yellow]Required Environment Variables:[/yellow]", + message_group=group_id, + ) + for var in required_env_vars: + # Check if already set in environment + import os + + current_value = os.environ.get(var, "") + if current_value: + emit_info( + f" {var}: [green]Already set[/green]", + message_group=group_id, + ) + env_vars[var] = current_value + else: + value = emit_prompt(f" Enter value for {var}: ").strip() + if value: + env_vars[var] = value + + # Get command line arguments + required_cmd_args = selected_server.get_command_line_args() + if required_cmd_args: + emit_info( + "\n[yellow]Command Line Arguments:[/yellow]", message_group=group_id + ) + for arg_config in required_cmd_args: + name = arg_config.get("name", "") + prompt = arg_config.get("prompt", name) + default = arg_config.get("default", "") + required = arg_config.get("required", True) + + # If required or has default, prompt user + if required or default: + arg_prompt = f" {prompt}" + if default: + arg_prompt += f" [{default}]" + if not required: + arg_prompt += " (optional)" + + value = emit_prompt(f"{arg_prompt}: ").strip() + if value: + cmd_args[name] = value + elif default: + cmd_args[name] = default + + # Install the server + return install_server_from_catalog( + self.manager, selected_server, server_name, env_vars, cmd_args, group_id + ) + + except ImportError: + emit_info("Server catalog not available", message_group=group_id) + return False + except Exception as e: + logger.error(f"Error installing from catalog: {e}") + emit_info(f"[red]Installation error: {e}[/red]", message_group=group_id) + return False diff --git a/code_puppy/command_line/mcp/list_command.py b/code_puppy/command_line/mcp/list_command.py new file mode 100644 index 00000000..f299a0af --- /dev/null +++ b/code_puppy/command_line/mcp/list_command.py @@ -0,0 +1,94 @@ +""" +MCP List Command - Lists all registered MCP servers in a formatted table. 
+""" + +import logging +from typing import List, Optional + +from rich.table import Table +from rich.text import Text + +from code_puppy.mcp_.managed_server import ServerState +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import format_state_indicator, format_uptime + +# Configure logging +logger = logging.getLogger(__name__) + + +class ListCommand(MCPCommandBase): + """ + Command handler for listing MCP servers. + + Displays all registered MCP servers in a formatted table with status information. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + List all registered MCP servers in a formatted table. + + Args: + args: Command arguments (unused for list command) + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + servers = self.manager.list_servers() + + if not servers: + emit_info("No MCP servers registered", message_group=group_id) + return + + # Create table for server list + table = Table(title="🔌 MCP Server Status Dashboard") + table.add_column("Name", style="cyan", no_wrap=True) + table.add_column("Type", style="dim", no_wrap=True) + table.add_column("State", justify="center") + table.add_column("Enabled", justify="center") + table.add_column("Uptime", style="dim") + table.add_column("Status", style="dim") + + for server in servers: + # Format state with appropriate color and icon + state_display = format_state_indicator(server.state) + + # Format enabled status + enabled_display = "✓" if server.enabled else "✗" + enabled_style = "green" if server.enabled else "red" + + # Format uptime + uptime_display = format_uptime(server.uptime_seconds) + + # Format status message + status_display = server.error_message or "OK" + if server.quarantined: + status_display = "Quarantined" + + table.add_row( + server.name, + server.type.upper(), + state_display, + Text(enabled_display, style=enabled_style), + uptime_display, + status_display, + ) + + emit_info(table, message_group=group_id) + + # Show summary + total = len(servers) + running = sum( + 1 for s in servers if s.state == ServerState.RUNNING and s.enabled + ) + emit_info( + f"\n📊 Summary: {running}/{total} servers running", + message_group=group_id, + ) + + except Exception as e: + logger.error(f"Error listing MCP servers: {e}") + emit_info(f"[red]Error listing servers: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/logs_command.py b/code_puppy/command_line/mcp/logs_command.py new file mode 100644 index 00000000..d282d8ec --- /dev/null +++ b/code_puppy/command_line/mcp/logs_command.py @@ -0,0 +1,126 @@ +""" +MCP Logs Command - Shows recent events/logs for a server. +""" + +import logging +from datetime import datetime +from typing import List, Optional + +from rich.table import Table +from rich.text import Text + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class LogsCommand(MCPCommandBase): + """ + Command handler for showing MCP server logs. + + Shows recent events/logs for a specific MCP server with configurable limit. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Show recent events/logs for a server. 
+ + Args: + args: Command arguments, expects [server_name] and optional [limit] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info("Usage: /mcp logs [limit]", message_group=group_id) + return + + server_name = args[0] + limit = 10 # Default limit + + if len(args) > 1: + try: + limit = int(args[1]) + if limit <= 0 or limit > 100: + emit_info( + "Limit must be between 1 and 100, using default: 10", + message_group=group_id, + ) + limit = 10 + except ValueError: + emit_info( + f"Invalid limit '{args[1]}', using default: 10", + message_group=group_id, + ) + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info(f"Server '{server_name}' not found", message_group=group_id) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Get server status which includes recent events + status = self.manager.get_server_status(server_id) + + if not status.get("exists", True): + emit_info( + f"Server '{server_name}' status not available", + message_group=group_id, + ) + return + + recent_events = status.get("recent_events", []) + + if not recent_events: + emit_info( + f"No recent events for server: {server_name}", + message_group=group_id, + ) + return + + # Show events in a table + table = Table(title=f"📋 Recent Events for {server_name} (last {limit})") + table.add_column("Time", style="dim", no_wrap=True) + table.add_column("Event", style="cyan") + table.add_column("Details", style="dim") + + # Take only the requested number of events + events_to_show = ( + recent_events[-limit:] if len(recent_events) > limit else recent_events + ) + + for event in reversed(events_to_show): # Show newest first + timestamp = datetime.fromisoformat(event["timestamp"]) + time_str = timestamp.strftime("%H:%M:%S") + event_type = event["event_type"] + + # Format details + details = event.get("details", {}) + details_str = details.get("message", "") + if not details_str and "error" in details: + details_str = str(details["error"]) + + # Color code event types + event_style = "cyan" + if "error" in event_type.lower(): + event_style = "red" + elif event_type in ["started", "enabled", "registered"]: + event_style = "green" + elif event_type in ["stopped", "disabled"]: + event_style = "yellow" + + table.add_row( + time_str, Text(event_type, style=event_style), details_str or "-" + ) + emit_info(table, message_group=group_id) + + except Exception as e: + logger.error(f"Error getting logs for server '{server_name}': {e}") + emit_info(f"[red]Error getting logs: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/remove_command.py b/code_puppy/command_line/mcp/remove_command.py new file mode 100644 index 00000000..c94e68a0 --- /dev/null +++ b/code_puppy/command_line/mcp/remove_command.py @@ -0,0 +1,82 @@ +""" +MCP Remove Command - Removes an MCP server. +""" + +import json +import logging +import os +from typing import List, Optional + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class RemoveCommand(MCPCommandBase): + """ + Command handler for removing MCP servers. + + Removes a specific MCP server from the manager and configuration. 
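+ + Removal also deletes the matching entry from mcp_servers.json (see below) so the persistent configuration stays in sync with the manager.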
+ """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Remove an MCP server. + + Args: + args: Command arguments, expects [server_name] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info("Usage: /mcp remove ", message_group=group_id) + return + + server_name = args[0] + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info(f"Server '{server_name}' not found", message_group=group_id) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Actually remove the server + success = self.manager.remove_server(server_id) + + if success: + emit_info(f"✓ Removed server: {server_name}", message_group=group_id) + + # Also remove from mcp_servers.json + from code_puppy.config import MCP_SERVERS_FILE + + if os.path.exists(MCP_SERVERS_FILE): + try: + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + + # Remove the server if it exists + if server_name in servers: + del servers[server_name] + + # Save back + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + except Exception as e: + logger.warning(f"Could not update mcp_servers.json: {e}") + else: + emit_info( + f"✗ Failed to remove server: {server_name}", message_group=group_id + ) + + except Exception as e: + logger.error(f"Error removing server '{server_name}': {e}") + emit_info(f"[red]Error removing server: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/restart_command.py b/code_puppy/command_line/mcp/restart_command.py new file mode 100644 index 00000000..e763ef40 --- /dev/null +++ b/code_puppy/command_line/mcp/restart_command.py @@ -0,0 +1,92 @@ +""" +MCP Restart Command - Restarts a specific MCP server. +""" + +import logging +from typing import List, Optional + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class RestartCommand(MCPCommandBase): + """ + Command handler for restarting MCP servers. + + Stops, reloads configuration, and starts a specific MCP server. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Restart a specific MCP server. 
+ + Args: + args: Command arguments, expects [server_name] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info("Usage: /mcp restart ", message_group=group_id) + return + + server_name = args[0] + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info(f"Server '{server_name}' not found", message_group=group_id) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Stop the server first + emit_info(f"Stopping server: {server_name}", message_group=group_id) + self.manager.stop_server_sync(server_id) + + # Then reload and start it + emit_info("Reloading configuration...", message_group=group_id) + reload_success = self.manager.reload_server(server_id) + + if reload_success: + emit_info(f"Starting server: {server_name}", message_group=group_id) + start_success = self.manager.start_server_sync(server_id) + + if start_success: + emit_info( + f"✓ Restarted server: {server_name}", message_group=group_id + ) + + # Reload the agent to pick up the server changes + try: + from code_puppy.agent import get_code_generation_agent + + get_code_generation_agent(force_reload=True) + emit_info( + "[dim]Agent reloaded with updated servers[/dim]", + message_group=group_id, + ) + except Exception as e: + logger.warning(f"Could not reload agent: {e}") + else: + emit_info( + f"✗ Failed to start server after reload: {server_name}", + message_group=group_id, + ) + else: + emit_info( + f"✗ Failed to reload server configuration: {server_name}", + message_group=group_id, + ) + + except Exception as e: + logger.error(f"Error restarting server '{server_name}': {e}") + emit_info( + f"[red]Failed to restart server: {e}[/red]", message_group=group_id + ) diff --git a/code_puppy/command_line/mcp/search_command.py b/code_puppy/command_line/mcp/search_command.py new file mode 100644 index 00000000..55bbbc13 --- /dev/null +++ b/code_puppy/command_line/mcp/search_command.py @@ -0,0 +1,117 @@ +""" +MCP Search Command - Searches for pre-configured MCP servers in the registry. +""" + +import logging +from typing import List, Optional + +from rich.table import Table + +from code_puppy.messaging import emit_info, emit_system_message + +from .base import MCPCommandBase + +# Configure logging +logger = logging.getLogger(__name__) + + +class SearchCommand(MCPCommandBase): + """ + Command handler for searching MCP server registry. + + Searches for pre-configured MCP servers with optional query terms. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Search for pre-configured MCP servers in the registry. 
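+ + Examples, taken from the suggestions emitted below: "/mcp search database", "/mcp search file", "/mcp search git". With no query, the 15 most popular servers are listed instead.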
+ + Args: + args: Search query terms + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + from code_puppy.mcp_.server_registry_catalog import catalog + + if not args: + # Show popular servers if no query + emit_info( + "[bold cyan]Popular MCP Servers:[/bold cyan]\n", + message_group=group_id, + ) + servers = catalog.get_popular(15) + else: + query = " ".join(args) + emit_info( + f"[bold cyan]Searching for: {query}[/bold cyan]\n", + message_group=group_id, + ) + servers = catalog.search(query) + + if not servers: + emit_info( + "[yellow]No servers found matching your search[/yellow]", + message_group=group_id, + ) + emit_info( + "Try: /mcp search database, /mcp search file, /mcp search git", + message_group=group_id, + ) + return + + # Create results table + table = Table(show_header=True, header_style="bold magenta") + table.add_column("ID", style="cyan", width=20) + table.add_column("Name", style="green") + table.add_column("Category", style="yellow") + table.add_column("Description", style="white") + table.add_column("Tags", style="dim") + + for server in servers[:20]: # Limit to 20 results + tags = ", ".join(server.tags[:3]) # Show first 3 tags + if len(server.tags) > 3: + tags += "..." + + # Add verified/popular indicators + indicators = [] + if server.verified: + indicators.append("✓") + if server.popular: + indicators.append("⭐") + name_display = server.display_name + if indicators: + name_display += f" {''.join(indicators)}" + + table.add_row( + server.id, + name_display, + server.category, + server.description[:50] + "..." + if len(server.description) > 50 + else server.description, + tags, + ) + + # The first message established the group, subsequent messages will auto-group + emit_system_message(table, message_group=group_id) + emit_info("\n[dim]✓ = Verified ⭐ = Popular[/dim]", message_group=group_id) + emit_info( + "[yellow]To install:[/yellow] /mcp install ", message_group=group_id + ) + emit_info( + "[yellow]For details:[/yellow] /mcp search ", + message_group=group_id, + ) + + except ImportError: + emit_info( + "[red]Server registry not available[/red]", message_group=group_id + ) + except Exception as e: + logger.error(f"Error searching server registry: {e}") + emit_info( + f"[red]Error searching servers: {e}[/red]", message_group=group_id + ) diff --git a/code_puppy/command_line/mcp/start_all_command.py b/code_puppy/command_line/mcp/start_all_command.py new file mode 100644 index 00000000..7f8e1a9e --- /dev/null +++ b/code_puppy/command_line/mcp/start_all_command.py @@ -0,0 +1,123 @@ +""" +MCP Start All Command - Starts all registered MCP servers. +""" + +import logging +import time +from typing import List, Optional + +from code_puppy.mcp_.managed_server import ServerState +from code_puppy.messaging import emit_info + +from ...agents import get_current_agent +from .base import MCPCommandBase + +# Configure logging +logger = logging.getLogger(__name__) + + +class StartAllCommand(MCPCommandBase): + """ + Command handler for starting all MCP servers. + + Starts all registered MCP servers and provides a summary of results. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Start all registered MCP servers. 
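+ + Servers that are already running are skipped; a summary of started, failed, and already-running counts is printed at the end.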
+ + Args: + args: Command arguments (unused) + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + servers = self.manager.list_servers() + + if not servers: + emit_info( + "[yellow]No servers registered[/yellow]", message_group=group_id + ) + return + + started_count = 0 + failed_count = 0 + already_running = 0 + + emit_info(f"Starting {len(servers)} servers...", message_group=group_id) + + for server_info in servers: + server_id = server_info.id + server_name = server_info.name + + # Skip if already running + if server_info.state == ServerState.RUNNING: + already_running += 1 + emit_info( + f" • {server_name}: already running", message_group=group_id + ) + continue + + # Try to start the server + success = self.manager.start_server_sync(server_id) + + if success: + started_count += 1 + emit_info( + f" [green]✓ Started: {server_name}[/green]", + message_group=group_id, + ) + else: + failed_count += 1 + emit_info( + f" [red]✗ Failed: {server_name}[/red]", message_group=group_id + ) + + # Summary + emit_info("", message_group=group_id) + if started_count > 0: + emit_info( + f"[green]Started {started_count} server(s)[/green]", + message_group=group_id, + ) + if already_running > 0: + emit_info( + f"{already_running} server(s) already running", + message_group=group_id, + ) + if failed_count > 0: + emit_info( + f"[yellow]Failed to start {failed_count} server(s)[/yellow]", + message_group=group_id, + ) + + # Reload agent if any servers were started + if started_count > 0: + # Give async tasks a moment to complete before reloading agent + try: + import asyncio + + asyncio.get_running_loop() # Check if in async context + # If we're in async context, wait a bit for servers to start + time.sleep(0.5) # Small delay to let async tasks progress + except RuntimeError: + pass # No async loop, servers will start when agent uses them + + try: + agent = get_current_agent() + agent.reload_code_generation_agent() + emit_info( + "[dim]Agent reloaded with updated servers[/dim]", + message_group=group_id, + ) + except Exception as e: + logger.warning(f"Could not reload agent: {e}") + + except Exception as e: + logger.error(f"Error starting all servers: {e}") + emit_info( + f"[red]Failed to start servers: {e}[/red]", message_group=group_id + ) diff --git a/code_puppy/command_line/mcp/start_command.py b/code_puppy/command_line/mcp/start_command.py new file mode 100644 index 00000000..dd52381d --- /dev/null +++ b/code_puppy/command_line/mcp/start_command.py @@ -0,0 +1,92 @@ +""" +MCP Start Command - Starts a specific MCP server. +""" + +import logging +import time +from typing import List, Optional + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class StartCommand(MCPCommandBase): + """ + Command handler for starting MCP servers. + + Starts a specific MCP server by name and reloads the agent. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Start a specific MCP server. 
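+ + Example (illustrative server name): "/mcp start filesystem" enables and starts the server registered under the name "filesystem".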
+ + Args: + args: Command arguments, expects [server_name] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info( + "[yellow]Usage: /mcp start [/yellow]", + message_group=group_id, + ) + return + + server_name = args[0] + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info( + f"[red]Server '{server_name}' not found[/red]", + message_group=group_id, + ) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Start the server (enable and start process) + success = self.manager.start_server_sync(server_id) + + if success: + # This and subsequent messages will auto-group with the first message + emit_info( + f"[green]✓ Started server: {server_name}[/green]", + message_group=group_id, + ) + + # Give async tasks a moment to complete + try: + import asyncio + + asyncio.get_running_loop() # Check if in async context + # If we're in async context, wait a bit for server to start + time.sleep(0.5) # Small delay to let async tasks progress + except RuntimeError: + pass # No async loop, server will start when agent uses it + + # Reload the agent to pick up the newly enabled server + try: + emit_info( + "[dim]Agent reloaded with updated servers[/dim]", + message_group=group_id, + ) + except Exception as e: + logger.warning(f"Could not reload agent: {e}") + else: + emit_info( + f"[red]✗ Failed to start server: {server_name}[/red]", + message_group=group_id, + ) + + except Exception as e: + logger.error(f"Error starting server '{server_name}': {e}") + emit_info(f"[red]Failed to start server: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/status_command.py b/code_puppy/command_line/mcp/status_command.py new file mode 100644 index 00000000..f35c5017 --- /dev/null +++ b/code_puppy/command_line/mcp/status_command.py @@ -0,0 +1,185 @@ +""" +MCP Status Command - Shows detailed status for MCP servers. +""" + +import logging +from datetime import datetime +from typing import List, Optional + +from rich.panel import Panel + +from code_puppy.mcp_.managed_server import ServerState +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .list_command import ListCommand +from .utils import ( + find_server_id_by_name, + format_state_indicator, + format_uptime, + suggest_similar_servers, +) + +# Configure logging +logger = logging.getLogger(__name__) + + +class StatusCommand(MCPCommandBase): + """ + Command handler for showing MCP server status. + + Shows detailed status for a specific server or brief status for all servers. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Show detailed status for a specific server or all servers. 
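+ + Example (illustrative server name): "/mcp status" lists every server (delegating to the list command), while "/mcp status filesystem" shows a detailed panel for that server only.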
+ + Args: + args: Command arguments, expects [server_name] (optional) + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + if args: + # Show detailed status for specific server + server_name = args[0] + server_id = find_server_id_by_name(self.manager, server_name) + + if not server_id: + emit_info( + f"Server '{server_name}' not found", message_group=group_id + ) + suggest_similar_servers( + self.manager, server_name, group_id=group_id + ) + return + + self._show_detailed_server_status(server_id, server_name, group_id) + else: + # Show brief status for all servers + list_command = ListCommand() + list_command.execute([], group_id=group_id) + + except Exception as e: + logger.error(f"Error showing server status: {e}") + emit_info(f"Failed to get server status: {e}", message_group=group_id) + + def _show_detailed_server_status( + self, server_id: str, server_name: str, group_id: Optional[str] = None + ) -> None: + """ + Show comprehensive status information for a specific server. + + Args: + server_id: ID of the server + server_name: Name of the server + group_id: Optional message group ID + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + status = self.manager.get_server_status(server_id) + + if not status.get("exists", True): + emit_info( + f"Server '{server_name}' not found or not accessible", + message_group=group_id, + ) + return + + # Create detailed status panel + status_lines = [] + + # Basic information + status_lines.append(f"[bold]Server:[/bold] {server_name}") + status_lines.append(f"[bold]ID:[/bold] {server_id}") + status_lines.append( + f"[bold]Type:[/bold] {status.get('type', 'unknown').upper()}" + ) + + # State and status + state = status.get("state", "unknown") + state_display = format_state_indicator( + ServerState(state) + if state in [s.value for s in ServerState] + else ServerState.STOPPED + ) + status_lines.append(f"[bold]State:[/bold] {state_display}") + + enabled = status.get("enabled", False) + status_lines.append( + f"[bold]Enabled:[/bold] {'✓ Yes' if enabled else '✗ No'}" + ) + + # Check async lifecycle manager status if available + try: + from code_puppy.mcp_.async_lifecycle import get_lifecycle_manager + + lifecycle_mgr = get_lifecycle_manager() + if lifecycle_mgr.is_running(server_id): + status_lines.append( + "[bold]Process:[/bold] [green]✓ Active (subprocess/connection running)[/green]" + ) + else: + status_lines.append("[bold]Process:[/bold] [dim]Not active[/dim]") + except Exception: + pass # Lifecycle manager not available + + quarantined = status.get("quarantined", False) + if quarantined: + status_lines.append("[bold]Quarantined:[/bold] [yellow]⚠ Yes[/yellow]") + + # Timing information + uptime = status.get("tracker_uptime") + if uptime: + uptime_str = format_uptime( + uptime.total_seconds() + if hasattr(uptime, "total_seconds") + else uptime + ) + status_lines.append(f"[bold]Uptime:[/bold] {uptime_str}") + + # Error information + error_msg = status.get("error_message") + if error_msg: + status_lines.append(f"[bold]Error:[/bold] [red]{error_msg}[/red]") + + # Event information + event_count = status.get("recent_events_count", 0) + status_lines.append(f"[bold]Recent Events:[/bold] {event_count}") + + # Metadata + metadata = status.get("tracker_metadata", {}) + if metadata: + status_lines.append(f"[bold]Metadata:[/bold] {len(metadata)} keys") + + # Create and show the panel + panel_content = "\n".join(status_lines) + panel = Panel( + 
panel_content, title=f"🔌 {server_name} Status", border_style="cyan" + ) + + emit_info(panel, message_group=group_id) + + # Show recent events if available + recent_events = status.get("recent_events", []) + if recent_events: + emit_info("\n📋 Recent Events:", message_group=group_id) + for event in recent_events[-5:]: # Show last 5 events + timestamp = datetime.fromisoformat(event["timestamp"]) + time_str = timestamp.strftime("%H:%M:%S") + emit_info( + f" {time_str}: {event['message']}", message_group=group_id + ) + + except Exception as e: + logger.error( + f"Error getting detailed status for server '{server_name}': {e}" + ) + emit_info( + f"[red]Error getting server status: {e}[/red]", message_group=group_id + ) diff --git a/code_puppy/command_line/mcp/stop_all_command.py b/code_puppy/command_line/mcp/stop_all_command.py new file mode 100644 index 00000000..a2867306 --- /dev/null +++ b/code_puppy/command_line/mcp/stop_all_command.py @@ -0,0 +1,106 @@ +""" +MCP Stop All Command - Stops all running MCP servers. +""" + +import logging +import time +from typing import List, Optional + +from code_puppy.mcp_.managed_server import ServerState +from code_puppy.messaging import emit_info + +from ...agents import get_current_agent +from .base import MCPCommandBase + +# Configure logging +logger = logging.getLogger(__name__) + + +class StopAllCommand(MCPCommandBase): + """ + Command handler for stopping all MCP servers. + + Stops all running MCP servers and provides a summary of results. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Stop all running MCP servers. + + Args: + args: Command arguments (unused) + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + try: + servers = self.manager.list_servers() + + if not servers: + emit_info("No servers registered", message_group=group_id) + return + + stopped_count = 0 + failed_count = 0 + + # Count running servers + running_servers = [s for s in servers if s.state == ServerState.RUNNING] + + if not running_servers: + emit_info("No servers are currently running", message_group=group_id) + return + + emit_info( + f"Stopping {len(running_servers)} running server(s)...", + message_group=group_id, + ) + + for server_info in running_servers: + server_id = server_info.id + server_name = server_info.name + + # Try to stop the server + success = self.manager.stop_server_sync(server_id) + + if success: + stopped_count += 1 + emit_info(f" ✓ Stopped: {server_name}", message_group=group_id) + else: + failed_count += 1 + emit_info(f" ✗ Failed: {server_name}", message_group=group_id) + + # Summary + emit_info("", message_group=group_id) + if stopped_count > 0: + emit_info(f"Stopped {stopped_count} server(s)", message_group=group_id) + if failed_count > 0: + emit_info( + f"Failed to stop {failed_count} server(s)", message_group=group_id + ) + + # Reload agent if any servers were stopped + if stopped_count > 0: + # Give async tasks a moment to complete before reloading agent + try: + import asyncio + + asyncio.get_running_loop() # Check if in async context + # If we're in async context, wait a bit for servers to stop + time.sleep(0.5) # Small delay to let async tasks progress + except RuntimeError: + pass # No async loop, servers will stop when needed + + try: + agent = get_current_agent() + agent.reload_code_generation_agent() + emit_info( + "[dim]Agent reloaded with updated servers[/dim]", + message_group=group_id, + ) + except Exception 
as e: + logger.warning(f"Could not reload agent: {e}") + + except Exception as e: + logger.error(f"Error stopping all servers: {e}") + emit_info(f"Failed to stop servers: {e}", message_group=group_id) diff --git a/code_puppy/command_line/mcp/stop_command.py b/code_puppy/command_line/mcp/stop_command.py new file mode 100644 index 00000000..5cb39bc4 --- /dev/null +++ b/code_puppy/command_line/mcp/stop_command.py @@ -0,0 +1,76 @@ +""" +MCP Stop Command - Stops a specific MCP server. +""" + +import logging +from typing import List, Optional + +from code_puppy.messaging import emit_info + +from ...agents import get_current_agent +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class StopCommand(MCPCommandBase): + """ + Command handler for stopping MCP servers. + + Stops a specific MCP server by name and reloads the agent. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Stop a specific MCP server. + + Args: + args: Command arguments, expects [server_name] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info( + "[yellow]Usage: /mcp stop [/yellow]", + message_group=group_id, + ) + return + + server_name = args[0] + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info(f"Server '{server_name}' not found", message_group=group_id) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Stop the server (disable and stop process) + success = self.manager.stop_server_sync(server_id) + + if success: + emit_info(f"✓ Stopped server: {server_name}", message_group=group_id) + + # Reload the agent to remove the disabled server + try: + agent = get_current_agent() + agent.reload_code_generation_agent() + emit_info( + "[dim]Agent reloaded with updated servers[/dim]", + message_group=group_id, + ) + except Exception as e: + logger.warning(f"Could not reload agent: {e}") + else: + emit_info( + f"✗ Failed to stop server: {server_name}", message_group=group_id + ) + + except Exception as e: + logger.error(f"Error stopping server '{server_name}': {e}") + emit_info(f"[red]Failed to stop server: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/test_command.py b/code_puppy/command_line/mcp/test_command.py new file mode 100644 index 00000000..cb54991f --- /dev/null +++ b/code_puppy/command_line/mcp/test_command.py @@ -0,0 +1,107 @@ +""" +MCP Test Command - Tests connectivity to a specific MCP server. +""" + +import logging +from typing import List, Optional + +from code_puppy.messaging import emit_info + +from .base import MCPCommandBase +from .utils import find_server_id_by_name, suggest_similar_servers + +# Configure logging +logger = logging.getLogger(__name__) + + +class TestCommand(MCPCommandBase): + """ + Command handler for testing MCP server connectivity. + + Tests connectivity and basic functionality of a specific MCP server. + """ + + def execute(self, args: List[str], group_id: Optional[str] = None) -> None: + """ + Test connectivity to a specific MCP server. 
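+ + Example (illustrative server name): "/mcp test filesystem" instantiates the pydantic server object and reports its type, enabled flag, and quarantine state.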
+ + Args: + args: Command arguments, expects [server_name] + group_id: Optional message group ID for grouping related messages + """ + if group_id is None: + group_id = self.generate_group_id() + + if not args: + emit_info("Usage: /mcp test ", message_group=group_id) + return + + server_name = args[0] + + try: + # Find server by name + server_id = find_server_id_by_name(self.manager, server_name) + if not server_id: + emit_info(f"Server '{server_name}' not found", message_group=group_id) + suggest_similar_servers(self.manager, server_name, group_id=group_id) + return + + # Get managed server + managed_server = self.manager.get_server(server_id) + if not managed_server: + emit_info( + f"Server '{server_name}' not accessible", message_group=group_id + ) + return + + emit_info( + f"🔍 Testing connectivity to server: {server_name}", + message_group=group_id, + ) + + # Basic connectivity test - try to get the pydantic server + try: + managed_server.get_pydantic_server() # Test server instantiation + emit_info( + "✓ Server instance created successfully", message_group=group_id + ) + + # Try to get server info if available + emit_info( + f" • Server type: {managed_server.config.type}", + message_group=group_id, + ) + emit_info( + f" • Server enabled: {managed_server.is_enabled()}", + message_group=group_id, + ) + emit_info( + f" • Server quarantined: {managed_server.is_quarantined()}", + message_group=group_id, + ) + + if not managed_server.is_enabled(): + emit_info( + " • Server is disabled - enable it with '/mcp start'", + message_group=group_id, + ) + + if managed_server.is_quarantined(): + emit_info( + " • Server is quarantined - may have recent errors", + message_group=group_id, + ) + + emit_info( + f"✓ Connectivity test passed for: {server_name}", + message_group=group_id, + ) + + except Exception as test_error: + emit_info( + f"✗ Connectivity test failed: {test_error}", message_group=group_id + ) + + except Exception as e: + logger.error(f"Error testing server '{server_name}': {e}") + emit_info(f"[red]Error testing server: {e}[/red]", message_group=group_id) diff --git a/code_puppy/command_line/mcp/utils.py b/code_puppy/command_line/mcp/utils.py new file mode 100644 index 00000000..8f27b99d --- /dev/null +++ b/code_puppy/command_line/mcp/utils.py @@ -0,0 +1,129 @@ +""" +MCP Command Utilities - Shared helper functions for MCP command handlers. + +Provides common utility functions used across multiple MCP command modules. +""" + +from typing import Optional + +from rich.text import Text + +from code_puppy.mcp_.managed_server import ServerState + + +def format_state_indicator(state: ServerState) -> Text: + """ + Format a server state with appropriate color and icon. + + Args: + state: Server state to format + + Returns: + Rich Text object with colored state indicator + """ + state_map = { + ServerState.RUNNING: ("✓ Run", "green"), + ServerState.STOPPED: ("✗ Stop", "red"), + ServerState.STARTING: ("↗ Start", "yellow"), + ServerState.STOPPING: ("↙ Stop", "yellow"), + ServerState.ERROR: ("⚠ Err", "red"), + ServerState.QUARANTINED: ("⏸ Quar", "yellow"), + } + + display, color = state_map.get(state, ("? Unk", "dim")) + return Text(display, style=color) + + +def format_uptime(uptime_seconds: Optional[float]) -> str: + """ + Format uptime in a human-readable format. 
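+ + Examples: format_uptime(45) returns "45s", format_uptime(3725) returns "1h 2m", format_uptime(None) returns "-".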
+ + Args: + uptime_seconds: Uptime in seconds, or None + + Returns: + Formatted uptime string + """ + if uptime_seconds is None or uptime_seconds <= 0: + return "-" + + # Convert to readable format + if uptime_seconds < 60: + return f"{int(uptime_seconds)}s" + elif uptime_seconds < 3600: + minutes = int(uptime_seconds // 60) + seconds = int(uptime_seconds % 60) + return f"{minutes}m {seconds}s" + else: + hours = int(uptime_seconds // 3600) + minutes = int((uptime_seconds % 3600) // 60) + return f"{hours}h {minutes}m" + + +def find_server_id_by_name(manager, server_name: str) -> Optional[str]: + """ + Find a server ID by its name. + + Args: + manager: MCP manager instance + server_name: Name of the server to find + + Returns: + Server ID if found, None otherwise + """ + import logging + + logger = logging.getLogger(__name__) + + try: + servers = manager.list_servers() + for server in servers: + if server.name.lower() == server_name.lower(): + return server.id + return None + except Exception as e: + logger.error(f"Error finding server by name '{server_name}': {e}") + return None + + +def suggest_similar_servers( + manager, server_name: str, group_id: Optional[str] = None +) -> None: + """ + Suggest similar server names when a server is not found. + + Args: + manager: MCP manager instance + server_name: The server name that was not found + group_id: Optional message group ID for grouping related messages + """ + import logging + + from code_puppy.messaging import emit_info + + logger = logging.getLogger(__name__) + + try: + servers = manager.list_servers() + if not servers: + emit_info("No servers are registered", message_group=group_id) + return + + # Simple suggestion based on partial matching + suggestions = [] + server_name_lower = server_name.lower() + + for server in servers: + if server_name_lower in server.name.lower(): + suggestions.append(server.name) + + if suggestions: + emit_info(f"Did you mean: {', '.join(suggestions)}", message_group=group_id) + else: + server_names = [s.name for s in servers] + emit_info( + f"Available servers: {', '.join(server_names)}", message_group=group_id + ) + + except Exception as e: + logger.error(f"Error suggesting similar servers: {e}") diff --git a/code_puppy/command_line/mcp/wizard_utils.py b/code_puppy/command_line/mcp/wizard_utils.py new file mode 100644 index 00000000..946e7ba8 --- /dev/null +++ b/code_puppy/command_line/mcp/wizard_utils.py @@ -0,0 +1,330 @@ +""" +MCP Interactive Wizard Utilities - Shared interactive installation wizard functions. + +Provides interactive functionality for installing and configuring MCP servers. +""" + +import logging +from typing import Any, Dict, Optional + +from code_puppy.messaging import emit_info, emit_prompt + +# Configure logging +logger = logging.getLogger(__name__) + + +def run_interactive_install_wizard(manager, group_id: str) -> bool: + """ + Run the interactive MCP server installation wizard. 
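+ + The wizard walks through server selection, a custom server name, required environment variables, and command line arguments before installing, using the helper functions below.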
+ + Args: + manager: MCP manager instance + group_id: Message group ID for grouping related messages + + Returns: + True if installation was successful, False otherwise + """ + try: + # Show welcome message + emit_info("🚀 MCP Server Installation Wizard", message_group=group_id) + emit_info( + "This wizard will help you install pre-configured MCP servers", + message_group=group_id, + ) + emit_info("", message_group=group_id) + + # Let user select a server + selected_server = interactive_server_selection(group_id) + if not selected_server: + return False + + # Get custom name + server_name = interactive_get_server_name(selected_server, group_id) + if not server_name: + return False + + # Collect environment variables and command line arguments + env_vars = {} + cmd_args = {} + + # Get environment variables + required_env_vars = selected_server.get_environment_vars() + if required_env_vars: + emit_info( + "\n[yellow]Required Environment Variables:[/yellow]", + message_group=group_id, + ) + for var in required_env_vars: + # Check if already set in environment + import os + + current_value = os.environ.get(var, "") + if current_value: + emit_info( + f" {var}: [green]Already set[/green]", message_group=group_id + ) + env_vars[var] = current_value + else: + value = emit_prompt(f" Enter value for {var}: ").strip() + if value: + env_vars[var] = value + + # Get command line arguments + required_cmd_args = selected_server.get_command_line_args() + if required_cmd_args: + emit_info( + "\n[yellow]Command Line Arguments:[/yellow]", message_group=group_id + ) + for arg_config in required_cmd_args: + name = arg_config.get("name", "") + prompt = arg_config.get("prompt", name) + default = arg_config.get("default", "") + required = arg_config.get("required", True) + + # If required or has default, prompt user + if required or default: + arg_prompt = f" {prompt}" + if default: + arg_prompt += f" [{default}]" + if not required: + arg_prompt += " (optional)" + + value = emit_prompt(f"{arg_prompt}: ").strip() + if value: + cmd_args[name] = value + elif default: + cmd_args[name] = default + + # Configure the server + return interactive_configure_server( + manager, selected_server, server_name, group_id, env_vars, cmd_args + ) + + except ImportError: + emit_info("[red]Server catalog not available[/red]", message_group=group_id) + return False + except Exception as e: + logger.error(f"Error in interactive wizard: {e}") + emit_info(f"[red]Wizard error: {e}[/red]", message_group=group_id) + return False + + +def interactive_server_selection(group_id: str): + """ + Interactive server selection from catalog. + + Returns selected server or None if cancelled. + """ + # This is a simplified version - the full implementation would have + # category browsing, search, etc. For now, we'll just show popular servers + try: + from code_puppy.mcp_.server_registry_catalog import catalog + + servers = catalog.get_popular(10) + if not servers: + emit_info( + "[red]No servers available in catalog[/red]", message_group=group_id + ) + return None + + emit_info("Popular MCP Servers:", message_group=group_id) + for i, server in enumerate(servers, 1): + indicators = [] + if server.verified: + indicators.append("✓") + if server.popular: + indicators.append("⭐") + + indicator_str = "" + if indicators: + indicator_str = " " + "".join(indicators) + + emit_info( + f"{i:2}. 
{server.display_name}{indicator_str}", message_group=group_id + ) + emit_info(f" {server.description[:80]}...", message_group=group_id) + + choice = emit_prompt( + "Enter number (1-{}) or 'q' to quit: ".format(len(servers)) + ) + + if choice.lower() == "q": + return None + + try: + index = int(choice) - 1 + if 0 <= index < len(servers): + return servers[index] + else: + emit_info("[red]Invalid selection[/red]", message_group=group_id) + return None + except ValueError: + emit_info("[red]Invalid input[/red]", message_group=group_id) + return None + + except Exception as e: + logger.error(f"Error in server selection: {e}") + return None + + +def interactive_get_server_name(selected_server, group_id: str) -> Optional[str]: + """ + Get custom server name from user. + + Returns server name or None if cancelled. + """ + default_name = selected_server.name + server_name = emit_prompt(f"Enter name for this server [{default_name}]: ").strip() + + if not server_name: + server_name = default_name + + return server_name + + +def interactive_configure_server( + manager, + selected_server, + server_name: str, + group_id: str, + env_vars: Dict[str, Any], + cmd_args: Dict[str, Any], +) -> bool: + """ + Configure and install the selected server. + + Returns True if successful, False otherwise. + """ + try: + # Check if server already exists + from .utils import find_server_id_by_name + + existing_server = find_server_id_by_name(manager, server_name) + if existing_server: + override = emit_prompt( + f"Server '{server_name}' already exists. Override? [y/N]: " + ) + if not override.lower().startswith("y"): + emit_info("Installation cancelled", message_group=group_id) + return False + + # Show confirmation + emit_info(f"Installing: {selected_server.display_name}", message_group=group_id) + emit_info(f"Name: {server_name}", message_group=group_id) + + if env_vars: + emit_info("Environment Variables:", message_group=group_id) + for var, value in env_vars.items(): + emit_info(f" {var}: [hidden]{value}[/hidden]", message_group=group_id) + + if cmd_args: + emit_info("Command Line Arguments:", message_group=group_id) + for arg, value in cmd_args.items(): + emit_info(f" {arg}: {value}", message_group=group_id) + + confirm = emit_prompt("Proceed with installation? [Y/n]: ") + if confirm.lower().startswith("n"): + emit_info("Installation cancelled", message_group=group_id) + return False + + # Install the server (simplified version) + return install_server_from_catalog( + manager, selected_server, server_name, env_vars, cmd_args, group_id + ) + + except Exception as e: + logger.error(f"Error configuring server: {e}") + emit_info(f"[red]Configuration error: {e}[/red]", message_group=group_id) + return False + + +def install_server_from_catalog( + manager, + selected_server, + server_name: str, + env_vars: Dict[str, Any], + cmd_args: Dict[str, Any], + group_id: str, +) -> bool: + """ + Install a server from the catalog with the given configuration. + + Returns True if successful, False otherwise. 
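+ + Side effects: exports the collected environment variables, registers a ServerConfig with the manager, and persists the entry to mcp_servers.json.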
+ """ + try: + import json + import os + + from code_puppy.config import MCP_SERVERS_FILE + from code_puppy.mcp_.managed_server import ServerConfig + + # Set environment variables in the current environment + for var, value in env_vars.items(): + os.environ[var] = value + + # Get server config with command line argument overrides + config_dict = selected_server.to_server_config(server_name, **cmd_args) + + # Update the config with actual environment variable values + if "env" in config_dict: + for env_key, env_value in config_dict["env"].items(): + # If it's a placeholder like $GITHUB_TOKEN, replace with actual value + if env_value.startswith("$"): + var_name = env_value[1:] # Remove the $ + if var_name in env_vars: + config_dict["env"][env_key] = env_vars[var_name] + + # Create ServerConfig + server_config = ServerConfig( + id=server_name, + name=server_name, + type=selected_server.type, + enabled=True, + config=config_dict, + ) + + # Register with manager + server_id = manager.register_server(server_config) + + if not server_id: + emit_info( + "[red]Failed to register server with manager[/red]", + message_group=group_id, + ) + return False + + # Save to mcp_servers.json for persistence + if os.path.exists(MCP_SERVERS_FILE): + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + else: + servers = {} + data = {"mcp_servers": servers} + + # Add new server + # Copy the config dict and add type before saving + save_config = config_dict.copy() + save_config["type"] = selected_server.type + servers[server_name] = save_config + + # Save back + os.makedirs(os.path.dirname(MCP_SERVERS_FILE), exist_ok=True) + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + + emit_info( + f"[green]✓ Successfully installed server: {server_name}[/green]", + message_group=group_id, + ) + emit_info( + "Use '/mcp start {}' to start the server".format(server_name), + message_group=group_id, + ) + + return True + + except Exception as e: + logger.error(f"Error installing server: {e}") + emit_info(f"[red]Installation failed: {e}[/red]", message_group=group_id) + return False diff --git a/code_puppy/command_line/model_picker_completion.py b/code_puppy/command_line/model_picker_completion.py new file mode 100644 index 00000000..bf1e6a02 --- /dev/null +++ b/code_puppy/command_line/model_picker_completion.py @@ -0,0 +1,129 @@ +import os +from typing import Iterable, Optional + +from prompt_toolkit import PromptSession +from prompt_toolkit.completion import Completer, Completion +from prompt_toolkit.document import Document +from prompt_toolkit.history import FileHistory + +from code_puppy.config import get_global_model_name, set_model_name +from code_puppy.model_factory import ModelFactory + + +def load_model_names(): + """Load model names from the config that's fetched from the endpoint.""" + models_config = ModelFactory.load_config() + return list(models_config.keys()) + + +def get_active_model(): + """ + Returns the active model from the config using get_model_name(). + This ensures consistency across the codebase by always using the config value. + """ + return get_global_model_name() + + +def set_active_model(model_name: str): + """ + Sets the active model name by updating the config (for persistence). 
+ """ + set_model_name(model_name) + # Reload the currently active agent so the new model takes effect immediately + try: + from code_puppy.agents import get_current_agent + + current_agent = get_current_agent() + # JSON agents may need to refresh their config before reload + if hasattr(current_agent, "refresh_config"): + try: + current_agent.refresh_config() + except Exception: + # Non-fatal, continue to reload + ... + current_agent.reload_code_generation_agent() + except Exception: + # Swallow errors to avoid breaking the prompt flow; model persists for next run + pass + + +class ModelNameCompleter(Completer): + """ + A completer that triggers on '/model' to show available models from models.json. + Only '/model' (not just '/') will trigger the dropdown. + """ + + def __init__(self, trigger: str = "/model"): + self.trigger = trigger + self.model_names = load_model_names() + + def get_completions( + self, document: Document, complete_event + ) -> Iterable[Completion]: + text = document.text + cursor_position = document.cursor_position + text_before_cursor = text[:cursor_position] + if self.trigger not in text_before_cursor: + return + symbol_pos = text_before_cursor.rfind(self.trigger) + text_after_trigger = text_before_cursor[symbol_pos + len(self.trigger) :] + start_position = -(len(text_after_trigger)) + for model_name in self.model_names: + meta = "Model (selected)" if model_name == get_active_model() else "Model" + yield Completion( + model_name, + start_position=start_position, + display=model_name, + display_meta=meta, + ) + + +def update_model_in_input(text: str) -> Optional[str]: + # If input starts with /model or /m and a model name, set model and strip it out + content = text.strip() + + # Check for /model command + if content.startswith("/model"): + rest = content[6:].strip() # Remove '/model' + for model in load_model_names(): + if rest == model: + set_active_model(model) + # Remove /model from the input + idx = text.find("/model" + model) + if idx != -1: + new_text = ( + text[:idx] + text[idx + len("/model" + model) :] + ).strip() + return new_text + + # Check for /m command + elif content.startswith("/m "): + rest = content[3:].strip() # Remove '/m ' + for model in load_model_names(): + if rest == model: + set_active_model(model) + # Remove /m from the input + idx = text.find("/m " + model) + if idx != -1: + new_text = (text[:idx] + text[idx + len("/m " + model) :]).strip() + return new_text + + return None + + +async def get_input_with_model_completion( + prompt_str: str = ">>> ", + trigger: str = "/model", + history_file: Optional[str] = None, +) -> str: + history = FileHistory(os.path.expanduser(history_file)) if history_file else None + session = PromptSession( + completer=ModelNameCompleter(trigger), + history=history, + complete_while_typing=True, + ) + text = await session.prompt_async(prompt_str) + possibly_stripped = update_model_in_input(text) + if possibly_stripped is not None: + return possibly_stripped + return text diff --git a/code_puppy/command_line/motd.py b/code_puppy/command_line/motd.py new file mode 100644 index 00000000..f6ce321c --- /dev/null +++ b/code_puppy/command_line/motd.py @@ -0,0 +1,67 @@ +""" +🐶 MOTD (Message of the Day) feature for code-puppy! 🐕 +Stores seen versions in ~/.code_puppy/motd.txt - woof woof! 🐾 +""" + +import os + +from code_puppy.config import CONFIG_DIR +from code_puppy.messaging import emit_info + +MOTD_VERSION = "2025-08-24" +MOTD_MESSAGE = """🐕‍🦺 +🐾``` +# 🐶🎉🐕 WOOF WOOF! AUGUST 24th 🐕🎉🐶 +40k Downloads! Woot! 
+Thanks for your support! +-Mike +""" +MOTD_TRACK_FILE = os.path.join(CONFIG_DIR, "motd.txt") + + +def has_seen_motd(version: str) -> bool: # 🐕 Check if puppy has seen this MOTD! + if not os.path.exists(MOTD_TRACK_FILE): + return False + with open(MOTD_TRACK_FILE, "r") as f: + seen_versions = {line.strip() for line in f if line.strip()} + return version in seen_versions + + +def mark_motd_seen(version: str): # 🐶 Mark MOTD as seen by this good puppy! + # Create directory if it doesn't exist 🏠🐕 + os.makedirs(os.path.dirname(MOTD_TRACK_FILE), exist_ok=True) + + # Check if the version is already in the file 📋🐶 + seen_versions = set() + if os.path.exists(MOTD_TRACK_FILE): + with open(MOTD_TRACK_FILE, "r") as f: + seen_versions = {line.strip() for line in f if line.strip()} + + # Only add the version if it's not already there 📝🐕‍🦺 + if version not in seen_versions: + with open(MOTD_TRACK_FILE, "a") as f: + f.write(f"{version}\n") + + +def print_motd( + console=None, force: bool = False +) -> bool: # 🐶 Print exciting puppy MOTD! + """ + 🐕 Print the message of the day to the user - woof woof! 🐕 + + Args: + console: Optional console object (for backward compatibility) 🖥️🐶 + force: Whether to force printing even if the MOTD has been seen 💪🐕‍🦺 + + Returns: + True if the MOTD was printed, False otherwise 🐾 + """ + if force or not has_seen_motd(MOTD_VERSION): + # Create a Rich Markdown object for proper rendering 🎨🐶 + from rich.markdown import Markdown + + markdown_content = Markdown(MOTD_MESSAGE) + emit_info(markdown_content) + mark_motd_seen(MOTD_VERSION) + return True + return False diff --git a/code_puppy/command_line/prompt_toolkit_completion.py b/code_puppy/command_line/prompt_toolkit_completion.py index af1fad20..11af2a9e 100644 --- a/code_puppy/command_line/prompt_toolkit_completion.py +++ b/code_puppy/command_line/prompt_toolkit_completion.py @@ -1,152 +1,406 @@ -import os -import glob -from typing import Optional, Iterable +# ANSI color codes are no longer necessary because prompt_toolkit handles +# styling via the `Style` class. We keep them here commented-out in case +# someone needs raw ANSI later, but they are unused in the current code. 
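The MOTD helpers above gate the banner on a version string recorded in `~/.code_puppy/motd.txt`. A minimal sketch of the intended call pattern (whether anything prints depends on whether this machine has already seen the current `MOTD_VERSION`):

```python
from code_puppy.command_line.motd import MOTD_VERSION, has_seen_motd, print_motd

# First call prints the banner unless this MOTD_VERSION was already recorded,
# then appends the version to the tracking file; later calls return False.
shown = print_motd(force=False)
print(shown, has_seen_motd(MOTD_VERSION))
```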
+# RESET = '\033[0m' +# GREEN = '\033[1;32m' +# CYAN = '\033[1;36m' +# YELLOW = '\033[1;33m' +# BOLD = '\033[1m' import asyncio +import os +from typing import Optional from prompt_toolkit import PromptSession -from prompt_toolkit.completion import Completer, Completion +from prompt_toolkit.completion import Completer, Completion, merge_completers +from prompt_toolkit.formatted_text import FormattedText from prompt_toolkit.history import FileHistory -from prompt_toolkit.document import Document +from prompt_toolkit.filters import is_searching +from prompt_toolkit.key_binding import KeyBindings +from prompt_toolkit.keys import Keys +from prompt_toolkit.layout.processors import Processor, Transformation +from prompt_toolkit.styles import Style + +from code_puppy.command_line.file_path_completion import FilePathCompleter +from code_puppy.command_line.load_context_completion import LoadContextCompleter +from code_puppy.command_line.model_picker_completion import ( + ModelNameCompleter, + get_active_model, + update_model_in_input, +) +from code_puppy.command_line.utils import list_directory +from code_puppy.config import ( + COMMAND_HISTORY_FILE, + get_config_keys, + get_puppy_name, + get_value, +) +from code_puppy.command_line.attachments import ( + DEFAULT_ACCEPTED_DOCUMENT_EXTENSIONS, + DEFAULT_ACCEPTED_IMAGE_EXTENSIONS, + _detect_path_tokens, _tokenise, +) + + +class SetCompleter(Completer): + def __init__(self, trigger: str = "/set"): + self.trigger = trigger + + def get_completions(self, document, complete_event): + text_before_cursor = document.text_before_cursor + stripped_text_for_trigger_check = text_before_cursor.lstrip() + + if not stripped_text_for_trigger_check.startswith(self.trigger): + return + # Determine the part of the text that is relevant for this completer + # This handles cases like " /set foo" where the trigger isn't at the start of the string + actual_trigger_pos = text_before_cursor.find(self.trigger) + effective_input = text_before_cursor[ + actual_trigger_pos: + ] # e.g., "/set keypart" or "/set " -class FilePathCompleter(Completer): - """A simple file path completer that works with a trigger symbol.""" + tokens = effective_input.split() + + # Case 1: Input is exactly the trigger (e.g., "/set") and nothing more (not even a trailing space on effective_input). + # Suggest adding a space. + if ( + len(tokens) == 1 + and tokens[0] == self.trigger + and not effective_input.endswith(" ") + ): + yield Completion( + text=self.trigger + " ", # Text to insert + start_position=-len(tokens[0]), # Replace the trigger itself + display=self.trigger + " ", # Visual display + display_meta="set config key", + ) + return + + # Case 2: Input is trigger + space (e.g., "/set ") or trigger + partial key (e.g., "/set partial") + base_to_complete = "" + if len(tokens) > 1: # e.g., ["/set", "partialkey"] + base_to_complete = tokens[1] + # If len(tokens) == 1, it implies effective_input was like "/set ", so base_to_complete remains "" + # This means we list all keys. 
+ + # --- SPECIAL HANDLING FOR 'model' KEY --- + if base_to_complete == "model": + # Don't return any completions -- let ModelNameCompleter handle it + return + for key in get_config_keys(): + if key == "model" or key == "puppy_token": + continue # exclude 'model' and 'puppy_token' from regular /set completions + if key.startswith(base_to_complete): + prev_value = get_value(key) + value_part = f" = {prev_value}" if prev_value is not None else " = " + completion_text = f"{key}{value_part}" + + yield Completion( + completion_text, + start_position=-len( + base_to_complete + ), # Correctly replace only the typed part of the key + display_meta="", + ) - def __init__(self, symbol: str = "@"): - self.symbol = symbol - def get_completions( - self, document: Document, complete_event - ) -> Iterable[Completion]: +class AttachmentPlaceholderProcessor(Processor): + """Display friendly placeholders for recognised attachments.""" + + _PLACEHOLDER_STYLE = "class:attachment-placeholder" + + def apply_transformation(self, transformation_input): + document = transformation_input.document text = document.text - cursor_position = document.cursor_position + if not text: + return Transformation(list(transformation_input.fragments)) + + detections, _warnings = _detect_path_tokens(text) + replacements: list[tuple[int, int, str]] = [] + search_cursor = 0 + ESCAPE_MARKER = "\u0000ESCAPED_SPACE\u0000" + masked_text = text.replace(r"\ ", ESCAPE_MARKER) + token_view = list(_tokenise(masked_text)) + for detection in detections: + display_text: str | None = None + if detection.path and detection.has_path(): + suffix = detection.path.suffix.lower() + if suffix in DEFAULT_ACCEPTED_IMAGE_EXTENSIONS: + display_text = f"[{suffix.lstrip('.') or 'image'} image]" + elif suffix in DEFAULT_ACCEPTED_DOCUMENT_EXTENSIONS: + display_text = f"[{suffix.lstrip('.') or 'file'} document]" + else: + display_text = "[file attachment]" + elif detection.link is not None: + display_text = "[link]" + + if not display_text: + continue + + # Use token-span for robust lookup (handles escaped spaces) + span_tokens = token_view[detection.start_index:detection.consumed_until] + raw_span = " ".join(span_tokens).replace(ESCAPE_MARKER, r"\ ") + index = text.find(raw_span, search_cursor) + span_len = len(raw_span) + if index == -1: + # Fallback to placeholder string + placeholder = detection.placeholder + index = text.find(placeholder, search_cursor) + span_len = len(placeholder) + if index == -1: + continue + replacements.append((index, index + span_len, display_text)) + search_cursor = index + span_len + + if not replacements: + return Transformation(list(transformation_input.fragments)) + + replacements.sort(key=lambda item: item[0]) + + new_fragments: list[tuple[str, str]] = [] + source_to_display_map: list[int] = [] + display_to_source_map: list[int] = [] + + source_index = 0 + display_index = 0 - # Check if our symbol is in the text before the cursor - text_before_cursor = text[:cursor_position] - if self.symbol not in text_before_cursor: - return # Symbol not found, no completions + def append_plain_segment(segment: str) -> None: + nonlocal source_index, display_index + if not segment: + return + new_fragments.append(("", segment)) + for _ in segment: + source_to_display_map.append(display_index) + display_to_source_map.append(source_index) + source_index += 1 + display_index += 1 - # Find the position of the last occurrence of the symbol before cursor - symbol_pos = text_before_cursor.rfind(self.symbol) + for start, end, replacement_text in 
replacements: + if start > source_index: + append_plain_segment(text[source_index:start]) - # Get the text after the symbol up to the cursor - text_after_symbol = text_before_cursor[symbol_pos + len(self.symbol) :] + placeholder = replacement_text or "" + placeholder_start = display_index + if placeholder: + new_fragments.append((self._PLACEHOLDER_STYLE, placeholder)) + for _ in placeholder: + display_to_source_map.append(start) + display_index += 1 - # Calculate start position - entire path will be replaced - start_position = -(len(text_after_symbol)) + for _ in text[source_index:end]: + source_to_display_map.append(placeholder_start if placeholder else display_index) + source_index += 1 - # Get matching files using glob pattern + if source_index < len(text): + append_plain_segment(text[source_index:]) + + def source_to_display(pos: int) -> int: + if pos < 0: + return 0 + if pos < len(source_to_display_map): + return source_to_display_map[pos] + return display_index + + def display_to_source(pos: int) -> int: + if pos < 0: + return 0 + if pos < len(display_to_source_map): + return display_to_source_map[pos] + return len(source_to_display_map) + + return Transformation( + new_fragments, + source_to_display=source_to_display, + display_to_source=display_to_source, + ) + + +class CDCompleter(Completer): + def __init__(self, trigger: str = "/cd"): + self.trigger = trigger + + def get_completions(self, document, complete_event): + text = document.text_before_cursor + if not text.strip().startswith(self.trigger): + return + tokens = text.strip().split() + if len(tokens) == 1: + base = "" + else: + base = tokens[1] try: - pattern = text_after_symbol + "*" - - # For empty pattern or pattern ending with /, list current directory - if not pattern.strip("*") or pattern.strip("*").endswith("/"): - base_path = pattern.strip("*") - if not base_path: # If empty, use current directory - base_path = "." - - # Make sure we have an absolute path or handle ~ expansion - if base_path.startswith("~"): - base_path = os.path.expanduser(base_path) - - # List all files in the directory - if os.path.isdir(base_path): - paths = [ - os.path.join(base_path, f) - for f in os.listdir(base_path) - if not f.startswith(".") or text_after_symbol.endswith(".") - ] + prefix = os.path.expanduser(base) + part = os.path.dirname(prefix) if os.path.dirname(prefix) else "." + dirs, _ = list_directory(part) + dirnames = [d for d in dirs if d.startswith(os.path.basename(base))] + base_dir = os.path.dirname(base) + for d in dirnames: + # Build the completion text so we keep the already-typed directory parts. 
+ if base_dir and base_dir != ".": + suggestion = os.path.join(base_dir, d) else: - paths = [] - else: - # For partial filename, use glob directly - paths = glob.glob(pattern) - - # Filter out hidden files unless explicitly requested - if not pattern.startswith(".") and not pattern.startswith("*/."): - paths = [ - p for p in paths if not os.path.basename(p).startswith(".") - ] - - # Sort for consistent display - paths.sort() - - for path in paths: - is_dir = os.path.isdir(path) - display = os.path.basename(path) - - # Determine display path (what gets inserted) - if os.path.isabs(path): - # Already absolute path - display_path = path - else: - # Convert to relative or absolute based on input - if text_after_symbol.startswith("/"): - # User wants absolute path - display_path = os.path.abspath(path) - elif text_after_symbol.startswith("~"): - # User wants home-relative path - home = os.path.expanduser("~") - if path.startswith(home): - display_path = "~" + path[len(home) :] - else: - display_path = path - else: - # Keep it as is (relative to current directory) - display_path = path - - display_meta = "Directory" if is_dir else "File" - + suggestion = d + # Append trailing slash so the user can continue tabbing into sub-dirs. + suggestion = suggestion.rstrip(os.sep) + os.sep yield Completion( - display_path, - start_position=start_position, - display=display, - display_meta=display_meta, + suggestion, + start_position=-len(base), + display=d + os.sep, + display_meta="Directory", ) - except (PermissionError, FileNotFoundError, OSError): - # Handle access errors gracefully + except Exception: + # Silently ignore errors (e.g., permission issues, non-existent dir) pass -async def get_input_with_path_completion( - prompt_str: str = ">>> ", symbol: str = "@", history_file: Optional[str] = None +def get_prompt_with_active_model(base: str = ">>> "): + from code_puppy.agents.agent_manager import get_current_agent + + puppy = get_puppy_name() + global_model = get_active_model() or "(default)" + + # Get current agent information + current_agent = get_current_agent() + agent_display = current_agent.display_name if current_agent else "code-puppy" + + # Check if current agent has a pinned model + agent_model = None + if current_agent and hasattr(current_agent, "get_model_name"): + agent_model = current_agent.get_model_name() + + # Determine which model to display + if agent_model and agent_model != global_model: + # Show both models when they differ + model_display = f"[{global_model} → {agent_model}]" + elif agent_model: + # Show only the agent model when pinned + model_display = f"[{agent_model}]" + else: + # Show only the global model when no agent model is pinned + model_display = f"[{global_model}]" + + cwd = os.getcwd() + home = os.path.expanduser("~") + if cwd.startswith(home): + cwd_display = "~" + cwd[len(home) :] + else: + cwd_display = cwd + return FormattedText( + [ + ("bold", "🐶 "), + ("class:puppy", f"{puppy}"), + ("", " "), + ("class:agent", f"[{agent_display}] "), + ("class:model", model_display + " "), + ("class:cwd", "(" + str(cwd_display) + ") "), + ("class:arrow", str(base)), + ] + ) + + +async def get_input_with_combined_completion( + prompt_str=">>> ", history_file: Optional[str] = None ) -> str: - """ - Get user input with path completion support. 
+ history = FileHistory(history_file) if history_file else None + completer = merge_completers( + [ + FilePathCompleter(symbol="@"), + ModelNameCompleter(trigger="/model"), + CDCompleter(trigger="/cd"), + SetCompleter(trigger="/set"), + LoadContextCompleter(trigger="/load_context"), + ] + ) + # Add custom key bindings and multiline toggle + bindings = KeyBindings() + + # Multiline mode state + multiline = {"enabled": False} - Args: - prompt_str: The prompt string to display - symbol: The symbol that triggers path completion - history_file: Path to the history file + # Toggle multiline with Alt+M + @bindings.add(Keys.Escape, "m") + def _(event): + multiline["enabled"] = not multiline["enabled"] + status = "ON" if multiline["enabled"] else "OFF" + # Print status for user feedback (version-agnostic) + print(f"[multiline] {status}", flush=True) - Returns: - The user input string - """ - # Create history instance if a history file is provided - history = FileHistory(os.path.expanduser(history_file)) if history_file else None + # Also toggle multiline with F2 (more reliable across platforms) + @bindings.add("f2") + def _(event): + multiline["enabled"] = not multiline["enabled"] + status = "ON" if multiline["enabled"] else "OFF" + print(f"[multiline] {status}", flush=True) + + # Newline insert bindings — robust and explicit + # Ctrl+J (line feed) works in virtually all terminals; mark eager so it wins + @bindings.add("c-j", eager=True) + def _(event): + event.app.current_buffer.insert_text("\n") + + # Also allow Ctrl+Enter for newline (terminal-dependent) + try: + @bindings.add("c-enter", eager=True) + def _(event): + event.app.current_buffer.insert_text("\n") + except Exception: + pass + + # Enter behavior depends on multiline mode + @bindings.add("enter", filter=~is_searching, eager=True) + def _(event): + if multiline["enabled"]: + event.app.current_buffer.insert_text("\n") + else: + event.current_buffer.validate_and_handle() + + @bindings.add(Keys.Escape) + def _(event): + """Cancel the current prompt when the user presses the ESC key alone.""" + event.app.exit(exception=KeyboardInterrupt) - # Create a session with our custom completer session = PromptSession( - completer=FilePathCompleter(symbol), history=history, complete_while_typing=True + completer=completer, + history=history, + complete_while_typing=True, + key_bindings=bindings, + input_processors=[AttachmentPlaceholderProcessor()], ) + # If they pass a string, backward-compat: convert it to formatted_text + if isinstance(prompt_str, str): + from prompt_toolkit.formatted_text import FormattedText - # Get input with completion - using async prompt to work with existing event loop - return await session.prompt_async(prompt_str) + prompt_str = FormattedText([(None, prompt_str)]) + style = Style.from_dict( + { + # Keys must AVOID the 'class:' prefix – that prefix is used only when + # tagging tokens in `FormattedText`. See prompt_toolkit docs. + "puppy": "bold magenta", + "owner": "bold white", + "agent": "bold blue", + "model": "bold cyan", + "cwd": "bold green", + "arrow": "bold yellow", + "attachment-placeholder": "italic cyan", + } + ) + text = await session.prompt_async(prompt_str, style=style) + possibly_stripped = update_model_in_input(text) + if possibly_stripped is not None: + return possibly_stripped + return text -# Example usage if __name__ == "__main__": - print( - "Type '@' followed by a path to see completion in action. Press Ctrl+D to exit." - ) + print("Type '@' for path-completion or '/model' to pick a model. 
Ctrl+D to exit.") async def main(): while True: try: - user_input = await get_input_with_path_completion( - ">>> ", history_file="~/.path_completion_history.txt" + inp = await get_input_with_combined_completion( + get_prompt_with_active_model(), history_file=COMMAND_HISTORY_FILE ) - print(f"You entered: {user_input}") + print(f"You entered: {inp}") except KeyboardInterrupt: continue except EOFError: diff --git a/code_puppy/command_line/utils.py b/code_puppy/command_line/utils.py new file mode 100644 index 00000000..1a742ee6 --- /dev/null +++ b/code_puppy/command_line/utils.py @@ -0,0 +1,39 @@ +import os +from typing import List, Tuple + +from rich.table import Table + + +def list_directory(path: str = None) -> Tuple[List[str], List[str]]: + """ + Returns (dirs, files) for the specified path, splitting out directories and files. + """ + if path is None: + path = os.getcwd() + entries = [] + try: + entries = [e for e in os.listdir(path)] + except Exception as e: + raise RuntimeError(f"Error listing directory: {e}") + dirs = [e for e in entries if os.path.isdir(os.path.join(path, e))] + files = [e for e in entries if not os.path.isdir(os.path.join(path, e))] + return dirs, files + + +def make_directory_table(path: str = None) -> Table: + """ + Returns a rich.Table object containing the directory listing. + """ + if path is None: + path = os.getcwd() + dirs, files = list_directory(path) + table = Table( + title=f"\U0001f4c1 [bold blue]Current directory:[/bold blue] [cyan]{path}[/cyan]" + ) + table.add_column("Type", style="dim", width=8) + table.add_column("Name", style="bold") + for d in sorted(dirs): + table.add_row("[green]dir[/green]", f"[cyan]{d}[/cyan]") + for f in sorted(files): + table.add_row("[yellow]file[/yellow]", f"{f}") + return table diff --git a/code_puppy/config.py b/code_puppy/config.py new file mode 100644 index 00000000..dc1a3f1c --- /dev/null +++ b/code_puppy/config.py @@ -0,0 +1,792 @@ +import configparser +import datetime +import json +import os +import pathlib +from typing import Optional + +from code_puppy.session_storage import save_session + +CONFIG_DIR = os.path.join(os.path.expanduser("~"), ".code_puppy") +CONFIG_FILE = os.path.join(CONFIG_DIR, "puppy.cfg") +MCP_SERVERS_FILE = os.path.join(CONFIG_DIR, "mcp_servers.json") +COMMAND_HISTORY_FILE = os.path.join(CONFIG_DIR, "command_history.txt") +MODELS_FILE = os.path.join(CONFIG_DIR, "models.json") +EXTRA_MODELS_FILE = os.path.join(CONFIG_DIR, "extra_models.json") +AGENTS_DIR = os.path.join(CONFIG_DIR, "agents") +CONTEXTS_DIR = os.path.join(CONFIG_DIR, "contexts") +AUTOSAVE_DIR = os.path.join(CONFIG_DIR, "autosaves") + +DEFAULT_SECTION = "puppy" +REQUIRED_KEYS = ["puppy_name", "owner_name"] + +# Runtime-only autosave session ID (per-process) +_CURRENT_AUTOSAVE_ID: Optional[str] = None + +# Cache containers for model validation and defaults +_model_validation_cache = {} +_default_model_cache = None +_default_vision_model_cache = None +_default_vqa_model_cache = None + + +def ensure_config_exists(): + """ + Ensure that the .code_puppy dir and puppy.cfg exist, prompting if needed. + Returns configparser.ConfigParser for reading. 
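The directory helpers in `code_puppy/command_line/utils.py` above are meant to be rendered with rich. A small usage sketch, assuming the current working directory is readable:

```python
from rich.console import Console

from code_puppy.command_line.utils import list_directory, make_directory_table

dirs, files = list_directory(".")            # entries split into (dirs, files)
print(f"{len(dirs)} dirs, {len(files)} files")

Console().print(make_directory_table("."))   # rich Table titled with the path
```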
+ """ + if not os.path.exists(CONFIG_DIR): + os.makedirs(CONFIG_DIR, exist_ok=True) + exists = os.path.isfile(CONFIG_FILE) + config = configparser.ConfigParser() + if exists: + config.read(CONFIG_FILE) + missing = [] + if DEFAULT_SECTION not in config: + config[DEFAULT_SECTION] = {} + for key in REQUIRED_KEYS: + if not config[DEFAULT_SECTION].get(key): + missing.append(key) + if missing: + print("🐾 Let's get your Puppy ready!") + for key in missing: + if key == "puppy_name": + val = input("What should we name the puppy? ").strip() + elif key == "owner_name": + val = input( + "What's your name (so Code Puppy knows its owner)? " + ).strip() + else: + val = input(f"Enter {key}: ").strip() + config[DEFAULT_SECTION][key] = val + with open(CONFIG_FILE, "w") as f: + config.write(f) + return config + + +def get_value(key: str): + config = configparser.ConfigParser() + config.read(CONFIG_FILE) + val = config.get(DEFAULT_SECTION, key, fallback=None) + return val + + +def get_puppy_name(): + return get_value("puppy_name") or "Puppy" + + +def get_owner_name(): + return get_value("owner_name") or "Master" + + +# Legacy function removed - message history limit is no longer used +# Message history is now managed by token-based compaction system +# using get_protected_token_count() and get_summarization_threshold() + + +def get_allow_recursion() -> bool: + """ + Get the allow_recursion configuration value. + Returns True if recursion is allowed, False otherwise. + """ + val = get_value("allow_recursion") + if val is None: + return True # Default to False for safety + return str(val).lower() in ("1", "true", "yes", "on") + + +def get_model_context_length() -> int: + """ + Get the context length for the currently configured model from models.json + """ + try: + from code_puppy.model_factory import ModelFactory + + model_configs = ModelFactory.load_config() + model_name = get_global_model_name() + + # Get context length from model config + model_config = model_configs.get(model_name, {}) + context_length = model_config.get("context_length", 128000) # Default value + + return int(context_length) + except Exception: + # Fallback to default context length if anything goes wrong + return 128000 + + +# --- CONFIG SETTER STARTS HERE --- +def get_config_keys(): + """ + Returns the list of all config keys currently in puppy.cfg, + plus certain preset expected keys (e.g. "yolo_mode", "model", "compaction_strategy", "message_limit", "allow_recursion"). + """ + default_keys = [ + "yolo_mode", + "model", + "compaction_strategy", + "protected_token_count", + "compaction_threshold", + "message_limit", + "allow_recursion", + "openai_reasoning_effort", + "auto_save_session", + "max_saved_sessions", + ] + config = configparser.ConfigParser() + config.read(CONFIG_FILE) + keys = set(config[DEFAULT_SECTION].keys()) if DEFAULT_SECTION in config else set() + keys.update(default_keys) + return sorted(keys) + + +def set_config_value(key: str, value: str): + """ + Sets a config value in the persistent config file. + """ + config = configparser.ConfigParser() + config.read(CONFIG_FILE) + if DEFAULT_SECTION not in config: + config[DEFAULT_SECTION] = {} + config[DEFAULT_SECTION][key] = value + with open(CONFIG_FILE, "w") as f: + config.write(f) + + +# --- MODEL STICKY EXTENSION STARTS HERE --- +def load_mcp_server_configs(): + """ + Loads the MCP server configurations from ~/.code_puppy/mcp_servers.json. + Returns a dict mapping names to their URL or config dict. + If file does not exist, returns an empty dict. 
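The config getters and setters above all go through the `[puppy]` section of puppy.cfg. A minimal round-trip sketch; it writes to the real config file, so the key name here (`example_key`) is a throwaway chosen for illustration, not a key the application uses:

```python
from code_puppy.config import get_config_keys, get_value, set_config_value

set_config_value("example_key", "hello")      # persisted under the [puppy] section
print(get_value("example_key"))               # -> "hello" (values are stored as strings)
print("example_key" in get_config_keys())     # keys from puppy.cfg are merged with the presets
```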
+ """ + from code_puppy.messaging.message_queue import emit_error, emit_system_message + + try: + if not pathlib.Path(MCP_SERVERS_FILE).exists(): + emit_system_message("[dim]No MCP configuration was found[/dim]") + return {} + with open(MCP_SERVERS_FILE, "r") as f: + conf = json.loads(f.read()) + return conf["mcp_servers"] + except Exception as e: + emit_error(f"Failed to load MCP servers - {str(e)}") + return {} + + +def _default_model_from_models_json(): + """Attempt to load the first model name from models.json. + + Falls back to the hard-coded default (``gpt-5``) if the file + cannot be read for any reason or is empty. + """ + global _default_model_cache + + if _default_model_cache is not None: + return _default_model_cache + + try: + from code_puppy.model_factory import ModelFactory + + models_config = ModelFactory.load_config() + if models_config: + first_key = next(iter(models_config)) + _default_model_cache = first_key + return first_key + _default_model_cache = "gpt-5" + return "gpt-5" + except Exception: + _default_model_cache = "gpt-5" + return "gpt-5" + + +def _default_vision_model_from_models_json() -> str: + """Select a default vision-capable model from models.json with caching.""" + global _default_vision_model_cache + + if _default_vision_model_cache is not None: + return _default_vision_model_cache + + try: + from code_puppy.model_factory import ModelFactory + + models_config = ModelFactory.load_config() + if models_config: + # Prefer explicitly tagged vision models + for name, config in models_config.items(): + if config.get("supports_vision"): + _default_vision_model_cache = name + return name + + # Fallback heuristic: common multimodal models + preferred_candidates = ( + "gpt-4.1", + "gpt-4.1-mini", + "gpt-4.1-nano", + "claude-4-0-sonnet", + "gemini-2.5-flash-preview-05-20", + ) + for candidate in preferred_candidates: + if candidate in models_config: + _default_vision_model_cache = candidate + return candidate + + # Last resort: use the general default model + _default_vision_model_cache = _default_model_from_models_json() + return _default_vision_model_cache + + _default_vision_model_cache = "gpt-4.1" + return "gpt-4.1" + except Exception: + _default_vision_model_cache = "gpt-4.1" + return "gpt-4.1" + + +def _default_vqa_model_from_models_json() -> str: + """Select a default VQA-capable model, preferring vision-ready options.""" + global _default_vqa_model_cache + + if _default_vqa_model_cache is not None: + return _default_vqa_model_cache + + try: + from code_puppy.model_factory import ModelFactory + + models_config = ModelFactory.load_config() + if models_config: + # Allow explicit VQA hints if present + for name, config in models_config.items(): + if config.get("supports_vqa"): + _default_vqa_model_cache = name + return name + + # Reuse multimodal heuristics before falling back to generic default + preferred_candidates = ( + "gpt-4.1", + "gpt-4.1-mini", + "claude-4-0-sonnet", + "gemini-2.5-flash-preview-05-20", + "gpt-4.1-nano", + ) + for candidate in preferred_candidates: + if candidate in models_config: + _default_vqa_model_cache = candidate + return candidate + + _default_vqa_model_cache = _default_model_from_models_json() + return _default_vqa_model_cache + + _default_vqa_model_cache = "gpt-4.1" + return "gpt-4.1" + except Exception: + _default_vqa_model_cache = "gpt-4.1" + return "gpt-4.1" + + +def _validate_model_exists(model_name: str) -> bool: + """Check if a model exists in models.json with caching to avoid redundant calls.""" + global 
_model_validation_cache + + # Check cache first + if model_name in _model_validation_cache: + return _model_validation_cache[model_name] + + try: + from code_puppy.model_factory import ModelFactory + + models_config = ModelFactory.load_config() + exists = model_name in models_config + + # Cache the result + _model_validation_cache[model_name] = exists + return exists + except Exception: + # If we can't validate, assume it exists to avoid breaking things + _model_validation_cache[model_name] = True + return True + + +def clear_model_cache(): + """Clear the model validation cache. Call this when models.json changes.""" + global \ + _model_validation_cache, \ + _default_model_cache, \ + _default_vision_model_cache, \ + _default_vqa_model_cache + _model_validation_cache.clear() + _default_model_cache = None + _default_vision_model_cache = None + _default_vqa_model_cache = None + + +def get_global_model_name(): + """Return a valid model name for Code Puppy to use. + + 1. Look at ``model`` in *puppy.cfg*. + 2. If that value exists **and** is present in *models.json*, use it. + 3. Otherwise return the first model listed in *models.json*. + 4. As a last resort (e.g. + *models.json* unreadable) fall back to ``claude-4-0-sonnet``. + """ + + stored_model = get_value("model") + + if stored_model: + # Use cached validation to avoid hitting ModelFactory every time + if _validate_model_exists(stored_model): + return stored_model + + # Either no stored model or it's not valid – choose default from models.json + return _default_model_from_models_json() + + +def set_model_name(model: str): + """Sets the model name in the persistent config file.""" + config = configparser.ConfigParser() + config.read(CONFIG_FILE) + if DEFAULT_SECTION not in config: + config[DEFAULT_SECTION] = {} + config[DEFAULT_SECTION]["model"] = model or "" + with open(CONFIG_FILE, "w") as f: + config.write(f) + + # Clear model cache when switching models to ensure fresh validation + clear_model_cache() + + +def get_vqa_model_name() -> str: + """Return the configured VQA model, falling back to an inferred default.""" + stored_model = get_value("vqa_model_name") + if stored_model and _validate_model_exists(stored_model): + return stored_model + return _default_vqa_model_from_models_json() + + +def set_vqa_model_name(model: str): + """Persist the configured VQA model name and refresh caches.""" + set_config_value("vqa_model_name", model or "") + clear_model_cache() + + +def get_puppy_token(): + """Returns the puppy_token from config, or None if not set.""" + return get_value("puppy_token") + + +def set_puppy_token(token: str): + """Sets the puppy_token in the persistent config file.""" + set_config_value("puppy_token", token) + + +def get_openai_reasoning_effort() -> str: + """Return the configured OpenAI reasoning effort (low, medium, high).""" + allowed_values = {"low", "medium", "high"} + configured = (get_value("openai_reasoning_effort") or "medium").strip().lower() + if configured not in allowed_values: + return "medium" + return configured + + +def set_openai_reasoning_effort(value: str) -> None: + """Persist the OpenAI reasoning effort ensuring it remains within allowed values.""" + allowed_values = {"low", "medium", "high"} + normalized = (value or "").strip().lower() + if normalized not in allowed_values: + raise ValueError( + f"Invalid reasoning effort '{value}'. 
Allowed: {', '.join(sorted(allowed_values))}" + ) + set_config_value("openai_reasoning_effort", normalized) + + +def normalize_command_history(): + """ + Normalize the command history file by converting old format timestamps to the new format. + + Old format example: + - "# 2025-08-04 12:44:45.469829" + + New format example: + - "# 2025-08-05T10:35:33" (ISO) + """ + import os + import re + + # Skip implementation during tests + import sys + + if "pytest" in sys.modules: + return + + # Skip normalization if file doesn't exist + command_history_exists = os.path.isfile(COMMAND_HISTORY_FILE) + if not command_history_exists: + return + + try: + # Read the entire file + with open(COMMAND_HISTORY_FILE, "r") as f: + content = f.read() + + # Skip empty files + if not content.strip(): + return + + # Define regex pattern for old timestamp format + # Format: "# YYYY-MM-DD HH:MM:SS.ffffff" + old_timestamp_pattern = r"# (\d{4}-\d{2}-\d{2}) (\d{2}:\d{2}:\d{2})\.(\d+)" + + # Function to convert matched timestamp to ISO format + def convert_to_iso(match): + date = match.group(1) + time = match.group(2) + # Create ISO format (YYYY-MM-DDThh:mm:ss) + return f"# {date}T{time}" + + # Replace all occurrences of the old timestamp format with the new ISO format + updated_content = re.sub(old_timestamp_pattern, convert_to_iso, content) + + # Write the updated content back to the file only if changes were made + if content != updated_content: + with open(COMMAND_HISTORY_FILE, "w") as f: + f.write(updated_content) + except Exception as e: + from rich.console import Console + + direct_console = Console() + error_msg = f"❌ An unexpected error occurred while normalizing command history: {str(e)}" + direct_console.print(f"[bold red]{error_msg}[/bold red]") + + +def get_user_agents_directory() -> str: + """Get the user's agents directory path. + + Returns: + Path to the user's Code Puppy agents directory. + """ + # Ensure the agents directory exists + os.makedirs(AGENTS_DIR, exist_ok=True) + return AGENTS_DIR + + +def initialize_command_history_file(): + """Create the command history file if it doesn't exist. + Handles migration from the old history file location for backward compatibility. + Also normalizes the command history format if needed. + """ + import os + from pathlib import Path + + command_history_exists = os.path.isfile(COMMAND_HISTORY_FILE) + if not command_history_exists: + try: + Path(COMMAND_HISTORY_FILE).touch() + + # For backwards compatibility, copy the old history file, then remove it + old_history_file = os.path.join( + os.path.expanduser("~"), ".code_puppy_history.txt" + ) + old_history_exists = os.path.isfile(old_history_file) + if old_history_exists: + import shutil + + shutil.copy2(Path(old_history_file), Path(COMMAND_HISTORY_FILE)) + Path(old_history_file).unlink(missing_ok=True) + + # Normalize the command history format if needed + normalize_command_history() + except Exception as e: + from rich.console import Console + + direct_console = Console() + error_msg = f"❌ An unexpected error occurred while trying to initialize history file: {str(e)}" + direct_console.print(f"[bold red]{error_msg}[/bold red]") + + +def get_yolo_mode(): + """ + Checks puppy.cfg for 'yolo_mode' (case-insensitive in value only). + Defaults to True if not set. + Allowed values for ON: 1, '1', 'true', 'yes', 'on' (all case-insensitive for value). 
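The yolo_mode getter parses its value with a simple truthy set, as the docstring above describes. A standalone sketch of that rule (a generic mirror of the pattern, not the function itself):

```python
TRUE_VALS = {"1", "true", "yes", "on"}

def parse_flag(raw, default):
    # Mirrors the config getters: any set value outside TRUE_VALS (e.g. "0", "off")
    # is False; only an unset value falls back to the default.
    if raw is None:
        return default
    return str(raw).strip().lower() in TRUE_VALS

assert parse_flag(None, True) is True
assert parse_flag("Yes", False) is True
assert parse_flag("0", True) is False
```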
+ """ + true_vals = {"1", "true", "yes", "on"} + cfg_val = get_value("yolo_mode") + if cfg_val is not None: + if str(cfg_val).strip().lower() in true_vals: + return True + return False + return True + + +def get_mcp_disabled(): + """ + Checks puppy.cfg for 'disable_mcp' (case-insensitive in value only). + Defaults to False if not set. + Allowed values for ON: 1, '1', 'true', 'yes', 'on' (all case-insensitive for value). + When enabled, Code Puppy will skip loading MCP servers entirely. + """ + true_vals = {"1", "true", "yes", "on"} + cfg_val = get_value("disable_mcp") + if cfg_val is not None: + if str(cfg_val).strip().lower() in true_vals: + return True + return False + return False + + +def get_protected_token_count(): + """ + Returns the user-configured protected token count for message history compaction. + This is the number of tokens in recent messages that won't be summarized. + Defaults to 50000 if unset or misconfigured. + Configurable by 'protected_token_count' key. + Enforces that protected tokens don't exceed 75% of model context length. + """ + val = get_value("protected_token_count") + try: + # Get the model context length to enforce the 75% limit + model_context_length = get_model_context_length() + max_protected_tokens = int(model_context_length * 0.75) + + # Parse the configured value + configured_value = int(val) if val else 50000 + + # Apply constraints: minimum 1000, maximum 75% of context length + return max(1000, min(configured_value, max_protected_tokens)) + except (ValueError, TypeError): + # If parsing fails, return a reasonable default that respects the 75% limit + model_context_length = get_model_context_length() + max_protected_tokens = int(model_context_length * 0.75) + return min(50000, max_protected_tokens) + + +def get_compaction_threshold(): + """ + Returns the user-configured compaction threshold as a float between 0.0 and 1.0. + This is the proportion of model context that triggers compaction. + Defaults to 0.85 (85%) if unset or misconfigured. + Configurable by 'compaction_threshold' key. + """ + val = get_value("compaction_threshold") + try: + threshold = float(val) if val else 0.85 + # Clamp between reasonable bounds + return max(0.8, min(0.95, threshold)) + except (ValueError, TypeError): + return 0.85 + + +def get_compaction_strategy() -> str: + """ + Returns the user-configured compaction strategy. + Options are 'summarization' or 'truncation'. + Defaults to 'summarization' if not set or misconfigured. + Configurable by 'compaction_strategy' key. + """ + val = get_value("compaction_strategy") + if val and val.lower() in ["summarization", "truncation"]: + return val.lower() + # Default to summarization + return "truncation" + + +def get_message_limit(default: int = 100) -> int: + """ + Returns the user-configured message/request limit for the agent. + This controls how many steps/requests the agent can take. + Defaults to 100 if unset or misconfigured. + Configurable by 'message_limit' key. + """ + val = get_value("message_limit") + try: + return int(val) if val else default + except (ValueError, TypeError): + return default + + +def save_command_to_history(command: str): + """Save a command to the history file with an ISO format timestamp. 
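The protected-token logic above caps the configured value at 75% of the model's context window and floors it at 1000. A worked sketch of that clamping in isolation, using a hypothetical 128k-context model:

```python
def clamp_protected_tokens(configured: int, context_length: int = 128_000) -> int:
    # Mirrors get_protected_token_count(): never protect more than 75% of the
    # context window, never less than 1000 tokens.
    max_protected = int(context_length * 0.75)   # 96_000 for a 128k-context model
    return max(1000, min(configured, max_protected))

print(clamp_protected_tokens(50_000))    # 50000 (within bounds, the default)
print(clamp_protected_tokens(500_000))   # 96000 (capped at 75% of the context window)
print(clamp_protected_tokens(10))        # 1000  (floored)
```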
+ + Args: + command: The command to save + """ + import datetime + + try: + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + with open(COMMAND_HISTORY_FILE, "a") as f: + f.write(f"\n# {timestamp}\n{command}\n") + except Exception as e: + from rich.console import Console + + direct_console = Console() + error_msg = ( + f"❌ An unexpected error occurred while saving command history: {str(e)}" + ) + direct_console.print(f"[bold red]{error_msg}[/bold red]") + + +def get_agent_pinned_model(agent_name: str) -> str: + """Get the pinned model for a specific agent. + + Args: + agent_name: Name of the agent to get the pinned model for. + + Returns: + Pinned model name, or None if no model is pinned for this agent. + """ + return get_value(f"agent_model_{agent_name}") + + +def set_agent_pinned_model(agent_name: str, model_name: str): + """Set the pinned model for a specific agent. + + Args: + agent_name: Name of the agent to pin the model for. + model_name: Model name to pin to this agent. + """ + set_config_value(f"agent_model_{agent_name}", model_name) + + +def clear_agent_pinned_model(agent_name: str): + """Clear the pinned model for a specific agent. + + Args: + agent_name: Name of the agent to clear the pinned model for. + """ + # We can't easily delete keys from configparser, so set to empty string + # which will be treated as None by get_agent_pinned_model + set_config_value(f"agent_model_{agent_name}", "") + + +def get_auto_save_session() -> bool: + """ + Checks puppy.cfg for 'auto_save_session' (case-insensitive in value only). + Defaults to True if not set. + Allowed values for ON: 1, '1', 'true', 'yes', 'on' (all case-insensitive for value). + """ + true_vals = {"1", "true", "yes", "on"} + cfg_val = get_value("auto_save_session") + if cfg_val is not None: + if str(cfg_val).strip().lower() in true_vals: + return True + return False + return True + + +def set_auto_save_session(enabled: bool): + """Sets the auto_save_session configuration value. + + Args: + enabled: Whether to enable auto-saving of sessions + """ + set_config_value("auto_save_session", "true" if enabled else "false") + + +def get_max_saved_sessions() -> int: + """ + Gets the maximum number of sessions to keep. + Defaults to 20 if not set. + """ + cfg_val = get_value("max_saved_sessions") + if cfg_val is not None: + try: + val = int(cfg_val) + return max(0, val) # Ensure non-negative + except (ValueError, TypeError): + pass + return 20 + + +def set_max_saved_sessions(max_sessions: int): + """Sets the max_saved_sessions configuration value. 
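Agent-specific model pins are stored as ordinary config keys named `agent_model_<agent>`, as shown above. A minimal pin/clear round-trip; the agent and model names are illustrative, and this writes to the real puppy.cfg:

```python
from code_puppy.config import (
    clear_agent_pinned_model,
    get_agent_pinned_model,
    set_agent_pinned_model,
)

set_agent_pinned_model("code-puppy", "gpt-5")        # writes agent_model_code-puppy
print(get_agent_pinned_model("code-puppy"))          # -> "gpt-5"

clear_agent_pinned_model("code-puppy")               # stores an empty string ...
print(get_agent_pinned_model("code-puppy") or None)  # ... which callers treat as unset
```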
+ + Args: + max_sessions: Maximum number of sessions to keep (0 for unlimited) + """ + set_config_value("max_saved_sessions", str(max_sessions)) + + +def get_current_autosave_id() -> str: + """Get or create the current autosave session ID for this process.""" + global _CURRENT_AUTOSAVE_ID + if not _CURRENT_AUTOSAVE_ID: + # Use a full timestamp so tests and UX can predict the name if needed + _CURRENT_AUTOSAVE_ID = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') + return _CURRENT_AUTOSAVE_ID + + +def rotate_autosave_id() -> str: + """Force a new autosave session ID and return it.""" + global _CURRENT_AUTOSAVE_ID + _CURRENT_AUTOSAVE_ID = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') + return _CURRENT_AUTOSAVE_ID + + +def get_current_autosave_session_name() -> str: + """Return the full session name used for autosaves (no file extension).""" + return f"auto_session_{get_current_autosave_id()}" + + +def set_current_autosave_from_session_name(session_name: str) -> str: + """Set the current autosave ID based on a full session name. + + Accepts names like 'auto_session_YYYYMMDD_HHMMSS' and extracts the ID part. + Returns the ID that was set. + """ + global _CURRENT_AUTOSAVE_ID + prefix = "auto_session_" + if session_name.startswith(prefix): + _CURRENT_AUTOSAVE_ID = session_name[len(prefix):] + else: + _CURRENT_AUTOSAVE_ID = session_name + return _CURRENT_AUTOSAVE_ID + + +def auto_save_session_if_enabled() -> bool: + """Automatically save the current session if auto_save_session is enabled.""" + if not get_auto_save_session(): + return False + + try: + import pathlib + from rich.console import Console + + from code_puppy.agents.agent_manager import get_current_agent + + console = Console() + + current_agent = get_current_agent() + history = current_agent.get_message_history() + if not history: + return False + + now = datetime.datetime.now() + session_name = get_current_autosave_session_name() + autosave_dir = pathlib.Path(AUTOSAVE_DIR) + + metadata = save_session( + history=history, + session_name=session_name, + base_dir=autosave_dir, + timestamp=now.isoformat(), + token_estimator=current_agent.estimate_tokens_for_message, + auto_saved=True, + ) + + console.print( + f"🐾 [dim]Auto-saved session: {metadata.message_count} messages ({metadata.total_tokens} tokens)[/dim]" + ) + + return True + + except Exception as exc: # pragma: no cover - defensive logging + from rich.console import Console + + Console().print(f"[dim]❌ Failed to auto-save session: {exc}[/dim]") + return False + + +def finalize_autosave_session() -> str: + """Persist the current autosave snapshot and rotate to a fresh session.""" + auto_save_session_if_enabled() + return rotate_autosave_id() diff --git a/code_puppy/http_utils.py b/code_puppy/http_utils.py new file mode 100644 index 00000000..ec96010c --- /dev/null +++ b/code_puppy/http_utils.py @@ -0,0 +1,248 @@ +""" +HTTP utilities module for code-puppy. + +This module provides functions for creating properly configured HTTP clients. 
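Autosave names in the config module above are derived from a per-process timestamp ID. A small sketch of the naming round-trip; the timestamp shown in the comment is illustrative:

```python
from code_puppy.config import (
    get_current_autosave_session_name,
    set_current_autosave_from_session_name,
)

name = get_current_autosave_session_name()      # e.g. "auto_session_20250824_101500"
assert name.startswith("auto_session_")

# Restoring from a saved name strips the prefix and keeps just the ID part.
restored_id = set_current_autosave_from_session_name(name)
assert name.endswith(restored_id)
```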
+""" + +import os +import socket +from typing import Dict, Optional, Union + +import httpx +import requests +from tenacity import stop_after_attempt, wait_exponential + +try: + from pydantic_ai.retries import ( + AsyncTenacityTransport, + RetryConfig, + TenacityTransport, + wait_retry_after, + ) +except ImportError: + # Fallback if pydantic_ai.retries is not available + AsyncTenacityTransport = None + RetryConfig = None + TenacityTransport = None + wait_retry_after = None + +try: + from .reopenable_async_client import ReopenableAsyncClient +except ImportError: + ReopenableAsyncClient = None + +try: + from .messaging import emit_info +except ImportError: + # Fallback if messaging system is not available + def emit_info(content: str, **metadata): + pass # No-op if messaging system is not available + + +def get_cert_bundle_path() -> str: + # First check if SSL_CERT_FILE environment variable is set + ssl_cert_file = os.environ.get("SSL_CERT_FILE") + if ssl_cert_file and os.path.exists(ssl_cert_file): + return ssl_cert_file + + +def create_client( + timeout: int = 180, + verify: Union[bool, str] = None, + headers: Optional[Dict[str, str]] = None, + retry_status_codes: tuple = (429, 502, 503, 504), +) -> httpx.Client: + if verify is None: + verify = get_cert_bundle_path() + + # If retry components are available, create a client with retry transport + if TenacityTransport and RetryConfig and wait_retry_after: + + def should_retry_status(response): + """Raise exceptions for retryable HTTP status codes.""" + if response.status_code in retry_status_codes: + emit_info( + f"HTTP retry: Retrying request due to status code {response.status_code}" + ) + return True + + transport = TenacityTransport( + config=RetryConfig( + retry=lambda e: isinstance(e, httpx.HTTPStatusError) + and e.response.status_code in retry_status_codes, + wait=wait_retry_after( + fallback_strategy=wait_exponential(multiplier=1, max=60), + max_wait=300, + ), + stop=stop_after_attempt(10), + reraise=True, + ), + validate_response=should_retry_status, + ) + + return httpx.Client( + transport=transport, verify=verify, headers=headers or {}, timeout=timeout + ) + else: + # Fallback to regular client if retry components are not available + return httpx.Client(verify=verify, headers=headers or {}, timeout=timeout) + + +def create_async_client( + timeout: int = 180, + verify: Union[bool, str] = None, + headers: Optional[Dict[str, str]] = None, + retry_status_codes: tuple = (429, 502, 503, 504), +) -> httpx.AsyncClient: + if verify is None: + verify = get_cert_bundle_path() + + # If retry components are available, create a client with retry transport + if AsyncTenacityTransport and RetryConfig and wait_retry_after: + + def should_retry_status(response): + """Raise exceptions for retryable HTTP status codes.""" + if response.status_code in retry_status_codes: + emit_info( + f"HTTP retry: Retrying request due to status code {response.status_code}" + ) + return True + + transport = AsyncTenacityTransport( + config=RetryConfig( + retry=lambda e: isinstance(e, httpx.HTTPStatusError) + and e.response.status_code in retry_status_codes, + wait=wait_retry_after(10), + stop=stop_after_attempt(10), + reraise=True, + ), + validate_response=should_retry_status, + ) + + return httpx.AsyncClient( + transport=transport, verify=verify, headers=headers or {}, timeout=timeout + ) + else: + # Fallback to regular client if retry components are not available + return httpx.AsyncClient(verify=verify, headers=headers or {}, timeout=timeout) + + +def 
create_requests_session( + timeout: float = 5.0, + verify: Union[bool, str] = None, + headers: Optional[Dict[str, str]] = None, +) -> requests.Session: + session = requests.Session() + + if verify is None: + verify = get_cert_bundle_path() + + session.verify = verify + + if headers: + session.headers.update(headers or {}) + + return session + + +def create_auth_headers( + api_key: str, header_name: str = "Authorization" +) -> Dict[str, str]: + return {header_name: f"Bearer {api_key}"} + + +def resolve_env_var_in_header(headers: Dict[str, str]) -> Dict[str, str]: + resolved_headers = {} + + for key, value in headers.items(): + if isinstance(value, str): + try: + expanded = os.path.expandvars(value) + resolved_headers[key] = expanded + except Exception: + resolved_headers[key] = value + else: + resolved_headers[key] = value + + return resolved_headers + + +def create_reopenable_async_client( + timeout: int = 180, + verify: Union[bool, str] = None, + headers: Optional[Dict[str, str]] = None, + retry_status_codes: tuple = (429, 502, 503, 504), +) -> Union[ReopenableAsyncClient, httpx.AsyncClient]: + if verify is None: + verify = get_cert_bundle_path() + + # If retry components are available, create a client with retry transport + if AsyncTenacityTransport and RetryConfig and wait_retry_after: + + def should_retry_status(response): + """Raise exceptions for retryable HTTP status codes.""" + if response.status_code in retry_status_codes: + emit_info( + f"HTTP retry: Retrying request due to status code {response.status_code}" + ) + return True + + transport = AsyncTenacityTransport( + config=RetryConfig( + retry=lambda e: isinstance(e, httpx.HTTPStatusError) + and e.response.status_code in retry_status_codes, + wait=wait_retry_after( + fallback_strategy=wait_exponential(multiplier=1, max=60), + max_wait=300, + ), + stop=stop_after_attempt(10), + reraise=True, + ), + validate_response=should_retry_status, + ) + + if ReopenableAsyncClient is not None: + return ReopenableAsyncClient( + transport=transport, + verify=verify, + headers=headers or {}, + timeout=timeout, + ) + else: + # Fallback to regular AsyncClient if ReopenableAsyncClient is not available + return httpx.AsyncClient( + transport=transport, + verify=verify, + headers=headers or {}, + timeout=timeout, + ) + else: + # Fallback to regular clients if retry components are not available + if ReopenableAsyncClient is not None: + return ReopenableAsyncClient( + verify=verify, headers=headers or {}, timeout=timeout + ) + else: + # Fallback to regular AsyncClient if ReopenableAsyncClient is not available + return httpx.AsyncClient( + verify=verify, headers=headers or {}, timeout=timeout + ) + + +def is_cert_bundle_available() -> bool: + cert_path = get_cert_bundle_path() + return os.path.exists(cert_path) and os.path.isfile(cert_path) + + +def find_available_port(start_port=8090, end_port=9010, host="127.0.0.1"): + for port in range(start_port, end_port + 1): + try: + # Try to bind to the port to check if it's available + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.bind((host, port)) + return port + except OSError: + # Port is in use, try the next one + continue + return None diff --git a/code_puppy/main.py b/code_puppy/main.py index 98c1c1e5..c8b9ed29 100644 --- a/code_puppy/main.py +++ b/code_puppy/main.py @@ -1,129 +1,385 @@ -import asyncio import argparse +import asyncio import os +import subprocess import sys -from dotenv import load_dotenv -from 
rich.console import Console -from rich.markdown import Markdown -from rich.console import ConsoleOptions, RenderResult -from rich.markdown import CodeBlock -from rich.text import Text +import time +import webbrowser +from pathlib import Path + +from rich.console import Console, ConsoleOptions, RenderResult +from rich.markdown import CodeBlock, Markdown from rich.syntax import Syntax +from rich.text import Text + +from code_puppy import __version__, callbacks, plugins +from code_puppy.agents import get_current_agent from code_puppy.command_line.prompt_toolkit_completion import ( - get_input_with_path_completion, + get_input_with_combined_completion, + get_prompt_with_active_model, ) - -# Initialize rich console for pretty output +from code_puppy.command_line.attachments import parse_prompt_attachments +from code_puppy.config import ( + AUTOSAVE_DIR, + COMMAND_HISTORY_FILE, + ensure_config_exists, + finalize_autosave_session, + initialize_command_history_file, + save_command_to_history, +) +from code_puppy.session_storage import restore_autosave_interactively +from code_puppy.http_utils import find_available_port from code_puppy.tools.common import console -from code_puppy.agent import code_generation_agent - -from code_puppy.tools import * +# message_history_accumulator and prune_interrupted_tool_calls have been moved to BaseAgent class +from code_puppy.tui_state import is_tui_mode, set_tui_mode +from code_puppy.version_checker import default_version_mismatch_behavior -# Define a function to get the secret file path -def get_secret_file_path(): - hidden_directory = os.path.join(os.path.expanduser("~"), ".agent_secret") - if not os.path.exists(hidden_directory): - os.makedirs(hidden_directory) - return os.path.join(hidden_directory, "history.txt") +plugins.load_plugin_callbacks() async def main(): - global shutdown_flag - - # Load environment variables from .env file - load_dotenv() - - # Set up argument parser parser = argparse.ArgumentParser(description="Code Puppy - A code generation agent") parser.add_argument( - "--interactive", "-i", action="store_true", help="Run in interactive mode" + "--version", + "-v", + action="version", + version=f"{__version__}", + help="Show version and exit", + ) + parser.add_argument( + "--interactive", + "-i", + action="store_true", + help="Run in interactive mode", + ) + parser.add_argument("--tui", "-t", action="store_true", help="Run in TUI mode") + parser.add_argument( + "--web", + "-w", + action="store_true", + help="Run in web mode (serves TUI in browser)", + ) + parser.add_argument( + "--prompt", + "-p", + type=str, + help="Execute a single prompt and exit (no interactive mode)", + ) + parser.add_argument( + "--agent", + "-a", + type=str, + help="Specify which agent to use (e.g., --agent code-puppy)", + ) + parser.add_argument( + "command", nargs="*", help="Run a single command (deprecated, use -p instead)" ) - parser.add_argument("command", nargs="*", help="Run a single command") args = parser.parse_args() - history_file_path = get_secret_file_path() + if args.tui or args.web: + set_tui_mode(True) + elif args.interactive or args.command or args.prompt: + set_tui_mode(False) + + message_renderer = None + if not is_tui_mode(): + from rich.console import Console - if args.command: - # Join the list of command arguments into a single string command - command = " ".join(args.command) + from code_puppy.messaging import ( + SynchronousInteractiveRenderer, + get_global_queue, + ) + + message_queue = get_global_queue() + display_console = Console() # 
Separate console for rendering messages + message_renderer = SynchronousInteractiveRenderer( + message_queue, display_console + ) + message_renderer.start() + + if ( + not args.tui + and not args.interactive + and not args.web + and not args.command + and not args.prompt + ): + pass + + initialize_command_history_file() + if args.web: + from rich.console import Console + + direct_console = Console() try: - while not shutdown_flag: - response = await code_generation_agent.run(command) - console.print(response.output_message) - if response.awaiting_user_input: - console.print( - "[bold red]The agent requires further input. Interactive mode is recommended for such tasks." - ) - except AttributeError as e: - console.print(f"[bold red]AttributeError:[/bold red] {str(e)}") - console.print( - "[bold yellow]\u26a0 The response might not be in the expected format, missing attributes like 'output_message'." + # Find an available port for the web server + available_port = find_available_port() + if available_port is None: + direct_console.print( + "[bold red]Error:[/bold red] No available ports in range 8090-9010!" + ) + sys.exit(1) + python_executable = sys.executable + serve_command = f"{python_executable} -m code_puppy --tui" + textual_serve_cmd = [ + "textual", + "serve", + "-c", + serve_command, + "--port", + str(available_port), + ] + direct_console.print( + "[bold blue]🌐 Starting Code Puppy web interface...[/bold blue]" ) + direct_console.print(f"[dim]Running: {' '.join(textual_serve_cmd)}[/dim]") + web_url = f"http://localhost:{available_port}" + direct_console.print( + f"[green]Web interface will be available at: {web_url}[/green]" + ) + direct_console.print("[yellow]Press Ctrl+C to stop the server.[/yellow]\n") + process = subprocess.Popen(textual_serve_cmd) + time.sleep(0.3) + try: + direct_console.print( + "[cyan]🚀 Opening web interface in your default browser...[/cyan]" + ) + webbrowser.open(web_url) + direct_console.print("[green]✅ Browser opened successfully![/green]\n") + except Exception as e: + direct_console.print( + f"[yellow]⚠️ Could not automatically open browser: {e}[/yellow]" + ) + direct_console.print( + f"[yellow]Please manually open: {web_url}[/yellow]\n" + ) + result = process.wait() + sys.exit(result) + except Exception as e: + direct_console.print( + f"[bold red]Error starting web interface:[/bold red] {str(e)}" + ) + sys.exit(1) + from code_puppy.messaging import emit_system_message + + emit_system_message("🐶 Code Puppy is Loading...") + + available_port = find_available_port() + if available_port is None: + error_msg = "Error: No available ports in range 8090-9010!" 
+ emit_system_message(f"[bold red]{error_msg}[/bold red]") + return + + ensure_config_exists() + + # Handle agent selection from command line + if args.agent: + from code_puppy.agents.agent_manager import set_current_agent, get_available_agents + + agent_name = args.agent.lower() + try: + # First check if the agent exists by getting available agents + available_agents = get_available_agents() + if agent_name not in available_agents: + emit_system_message(f"[bold red]Error:[/bold red] Agent '{agent_name}' not found") + emit_system_message(f"Available agents: {', '.join(available_agents.keys())}") + sys.exit(1) + + # Agent exists, set it + set_current_agent(agent_name) + emit_system_message(f"🤖 Using agent: {agent_name}") except Exception as e: - console.print(f"[bold red]Unexpected Error:[/bold red] {str(e)}") - elif args.interactive: - await interactive_mode(history_file_path) + emit_system_message(f"[bold red]Error setting agent:[/bold red] {str(e)}") + sys.exit(1) + + current_version = __version__ + + no_version_update = os.getenv("NO_VERSION_UPDATE", "").lower() in ( + "1", + "true", + "yes", + "on", + ) + if no_version_update: + version_msg = f"Current version: {current_version}" + update_disabled_msg = ( + "Update phase disabled because NO_VERSION_UPDATE is set to 1 or true" + ) + emit_system_message(version_msg) + emit_system_message(f"[dim]{update_disabled_msg}[/dim]") else: - parser.print_help() + if len(callbacks.get_callbacks("version_check")): + await callbacks.on_version_check(current_version) + else: + default_version_mismatch_behavior(current_version) + + await callbacks.on_startup() + + global shutdown_flag + shutdown_flag = False + try: + initial_command = None + prompt_only_mode = False + + if args.prompt: + initial_command = args.prompt + prompt_only_mode = True + elif args.command: + initial_command = " ".join(args.command) + prompt_only_mode = False + + if prompt_only_mode: + await execute_single_prompt(initial_command, message_renderer) + elif is_tui_mode(): + try: + from code_puppy.tui import run_textual_ui + + await run_textual_ui(initial_command=initial_command) + except ImportError: + from code_puppy.messaging import emit_error, emit_warning + + emit_error( + "Error: Textual UI not available. Install with: pip install textual" + ) + emit_warning("Falling back to interactive mode...") + await interactive_mode(message_renderer) + except Exception as e: + from code_puppy.messaging import emit_error, emit_warning + + emit_error(f"TUI Error: {str(e)}") + emit_warning("Falling back to interactive mode...") + await interactive_mode(message_renderer) + elif args.interactive or initial_command: + await interactive_mode(message_renderer, initial_command=initial_command) + else: + await prompt_then_interactive_mode(message_renderer) + finally: + if message_renderer: + message_renderer.stop() + await callbacks.on_shutdown() # Add the file handling functionality for interactive mode -async def interactive_mode(history_file_path: str) -> None: +async def interactive_mode(message_renderer, initial_command: str = None) -> None: + from code_puppy.command_line.command_handler import handle_command + """Run the agent in interactive mode.""" - console.print("[bold green]Code Puppy[/bold green] - Interactive Mode") - console.print("Type 'exit' or 'quit' to exit the interactive mode.") - console.print("Type 'clear' to reset the conversation history.") - console.print( - "Type [bold blue]@[/bold blue] followed by a path to use file path completion." 
+ + display_console = message_renderer.console + from code_puppy.messaging import emit_info, emit_system_message + + emit_info("[bold green]Code Puppy[/bold green] - Interactive Mode") + emit_system_message("Type '/exit' or '/quit' to exit the interactive mode.") + emit_system_message("Type 'clear' to reset the conversation history.") + emit_system_message("[dim]Type /help to view all commands[/dim]") + emit_system_message( + "Type [bold blue]@[/bold blue] for path completion, or [bold blue]/m[/bold blue] to pick a model. Toggle multiline with [bold blue]Alt+M[/bold blue] or [bold blue]F2[/bold blue]; newline: [bold blue]Ctrl+J[/bold blue]." + ) + emit_system_message( + "Press [bold red]Ctrl+C[/bold red] during processing to cancel the current task or inference." ) + try: + from code_puppy.command_line.motd import print_motd + + print_motd(console, force=False) + except Exception as e: + from code_puppy.messaging import emit_warning + + emit_warning(f"MOTD error: {e}") + from code_puppy.messaging import emit_info + + emit_info("[bold cyan]Initializing agent...[/bold cyan]") + + + # Initialize the runtime agent manager + if initial_command: + from code_puppy.agents import get_current_agent + from code_puppy.messaging import emit_info, emit_system_message + + agent = get_current_agent() + emit_info( + f"[bold blue]Processing initial command:[/bold blue] {initial_command}" + ) + + try: + # Check if any tool is waiting for user input before showing spinner + try: + from code_puppy.tools.command_runner import is_awaiting_user_input + + awaiting_input = is_awaiting_user_input() + except ImportError: + awaiting_input = False + + # Run with or without spinner based on whether we're awaiting input + response = await run_prompt_with_attachments( + agent, + initial_command, + spinner_console=display_console, + use_spinner=not awaiting_input, + ) + if response is not None: + agent_response = response.output + + emit_system_message( + f"\n[bold purple]AGENT RESPONSE: [/bold purple]\n{agent_response}" + ) + emit_system_message("\n" + "=" * 50) + emit_info("[bold green]🐶 Continuing in Interactive Mode[/bold green]") + emit_system_message( + "Your command and response are preserved in the conversation history." + ) + emit_system_message("=" * 50 + "\n") + + except Exception as e: + from code_puppy.messaging import emit_error + + emit_error(f"Error processing initial command: {str(e)}") # Check if prompt_toolkit is installed try: - import prompt_toolkit + from code_puppy.messaging import emit_system_message - console.print("[dim]Using prompt_toolkit for enhanced tab completion[/dim]") - except ImportError: - console.print( - "[yellow]Warning: prompt_toolkit not installed. Installing now...[/yellow]" + emit_system_message( + "[dim]Using prompt_toolkit for enhanced tab completion[/dim]" ) + except ImportError: + from code_puppy.messaging import emit_warning + + emit_warning("Warning: prompt_toolkit not installed. 
Installing now...") try: import subprocess subprocess.check_call( [sys.executable, "-m", "pip", "install", "prompt_toolkit"] ) - console.print("[green]Successfully installed prompt_toolkit[/green]") - except Exception as e: - console.print(f"[bold red]Error installing prompt_toolkit: {e}[/bold red]") - console.print( - "[yellow]Falling back to basic input without tab completion[/yellow]" - ) + from code_puppy.messaging import emit_success - message_history = [] + emit_success("Successfully installed prompt_toolkit") + except Exception as e: + from code_puppy.messaging import emit_error, emit_warning - # Set up history file in home directory - history_file_path_prompt = os.path.expanduser("~/.code_puppy_history.txt") - history_dir = os.path.dirname(history_file_path_prompt) + emit_error(f"Error installing prompt_toolkit: {e}") + emit_warning("Falling back to basic input without tab completion") - # Ensure history directory exists - if history_dir and not os.path.exists(history_dir): - try: - os.makedirs(history_dir, exist_ok=True) - except Exception as e: - console.print( - f"[yellow]Warning: Could not create history directory: {e}[/yellow]" - ) + await restore_autosave_interactively(Path(AUTOSAVE_DIR)) while True: - console.print("[bold blue]Enter your coding task:[/bold blue]") + from code_puppy.agents.agent_manager import get_current_agent + from code_puppy.messaging import emit_info + + # Get the custom prompt from the current agent, or use default + current_agent = get_current_agent() + user_prompt = current_agent.get_user_prompt() or "Enter your coding task:" + + emit_info(f"[bold blue]{user_prompt}[/bold blue]") try: # Use prompt_toolkit for enhanced input with path completion try: - # Use the async version of get_input_with_path_completion - task = await get_input_with_path_completion( - ">>> 🐶 ", symbol="@", history_file=history_file_path_prompt + # Use the async version of get_input_with_combined_completion + task = await get_input_with_combined_completion( + get_prompt_with_active_model(), history_file=COMMAND_HISTORY_FILE ) except ImportError: # Fall back to basic input if prompt_toolkit is not available @@ -131,60 +387,99 @@ async def interactive_mode(history_file_path: str) -> None: except (KeyboardInterrupt, EOFError): # Handle Ctrl+C or Ctrl+D - console.print("\n[yellow]Input cancelled[/yellow]") + from code_puppy.messaging import emit_warning + + emit_warning("\nInput cancelled") continue - # Check for exit commands - if task.strip().lower() in ["exit", "quit"]: - console.print("[bold green]Goodbye![/bold green]") + # Check for exit commands (plain text or command form) + if task.strip().lower() in ["exit", "quit"] or task.strip().lower() in [ + "/exit", + "/quit", + ]: + from code_puppy.messaging import emit_success + + emit_success("Goodbye!") + # The renderer is stopped in the finally block of main(). 
break - # Check for clear command - if task.strip().lower() == "clear": - message_history = [] - console.print("[bold yellow]Conversation history cleared![/bold yellow]") - console.print( - "[dim]The agent will not remember previous interactions.[/dim]\n" - ) + # Check for clear command (supports both `clear` and `/clear`) + if task.strip().lower() in ("clear", "/clear"): + from code_puppy.messaging import emit_info, emit_system_message, emit_warning + + agent = get_current_agent() + new_session_id = finalize_autosave_session() + agent.clear_message_history() + emit_warning("Conversation history cleared!") + emit_system_message("The agent will not remember previous interactions.\n") + emit_info(f"[dim]Auto-save session rotated to: {new_session_id}[/dim]") continue - if task.strip(): - console.print(f"\n[bold blue]Processing task:[/bold blue] {task}\n") + # Parse attachments first so leading paths aren't misread as commands + processed_for_commands = parse_prompt_attachments(task) + cleaned_for_commands = (processed_for_commands.prompt or "").strip() - # Write to the secret file for permanent history - with open(history_file_path, "a") as f: - f.write(f"{task}\n") + # Handle / commands based on cleaned prompt (after stripping attachments) + if cleaned_for_commands.startswith("/"): + try: + command_result = handle_command(cleaned_for_commands) + except Exception as e: + from code_puppy.messaging import emit_error + emit_error(f"Command error: {e}") + # Continue interactive loop instead of exiting + continue + if command_result is True: + continue + elif isinstance(command_result, str): + # Command returned a prompt to execute + task = command_result + elif command_result is False: + # Command not recognized, continue with normal processing + pass + + if task.strip(): + # Write to the secret file for permanent history with timestamp + save_command_to_history(task) try: prettier_code_blocks() - console.log(f"Asking: {task}...", style="cyan") - - # Store agent's full response - agent_response = None + # No need to get agent directly - use manager's run methods - result = await code_generation_agent.run( - task, message_history=message_history + # Use our custom helper to enable attachment handling with spinner support + result = await run_prompt_with_attachments( + current_agent, + task, + spinner_console=message_renderer.console, ) + # Check if the task was cancelled (but don't show message if we just killed processes) + if result is None: + continue # Get the structured response agent_response = result.output - console.print(agent_response.output_message) + from code_puppy.messaging import emit_info - # Update message history with all messages from this interaction - message_history = result.new_messages() + emit_system_message( + f"\n[bold purple]AGENT RESPONSE: [/bold purple]\n{agent_response}" + ) - if agent_response and agent_response.awaiting_user_input: - console.print( - "\n[bold yellow]\u26a0 Agent needs your input to continue.[/bold yellow]" - ) + # Auto-save session if enabled + from code_puppy.config import auto_save_session_if_enabled + auto_save_session_if_enabled() - # Show context status - console.print( - f"[dim]Context: {len(message_history)} messages in history[/dim]\n" - ) + # Ensure console output is flushed before next prompt + # This fixes the issue where prompt doesn't appear after agent response + display_console.file.flush() if hasattr( + display_console.file, "flush" + ) else None + import time + + time.sleep(0.1) # Brief pause to ensure all messages are rendered 
except Exception: - console.print_exception(show_locals=True) + from code_puppy.messaging.queue_console import get_queue_console + + get_queue_console().print_exception() def prettier_code_blocks(): @@ -207,10 +502,157 @@ def __rich_console__( Markdown.elements["fence"] = SimpleCodeBlock +async def run_prompt_with_attachments( + agent, + raw_prompt: str, + *, + spinner_console=None, + use_spinner: bool = True, +): + """Run the agent after parsing CLI attachments for image/document support.""" + from code_puppy.messaging import emit_system_message, emit_warning + + processed_prompt = parse_prompt_attachments(raw_prompt) + + for warning in processed_prompt.warnings: + emit_warning(warning) + + summary_parts = [] + if processed_prompt.attachments: + summary_parts.append(f"binary files: {len(processed_prompt.attachments)}") + if processed_prompt.link_attachments: + summary_parts.append(f"urls: {len(processed_prompt.link_attachments)}") + if summary_parts: + emit_system_message( + "[dim]Attachments detected -> " + ", ".join(summary_parts) + "[/dim]" + ) + + if not processed_prompt.prompt: + emit_warning( + "Prompt is empty after removing attachments; add instructions and retry." + ) + return None + + attachments = [attachment.content for attachment in processed_prompt.attachments] + link_attachments = [link.url_part for link in processed_prompt.link_attachments] + + if use_spinner and spinner_console is not None: + from code_puppy.messaging.spinner import ConsoleSpinner + + with ConsoleSpinner(console=spinner_console): + return await agent.run_with_mcp( + processed_prompt.prompt, + attachments=attachments, + link_attachments=link_attachments, + ) + + return await agent.run_with_mcp( + processed_prompt.prompt, + attachments=attachments, + link_attachments=link_attachments, + ) + + +async def execute_single_prompt(prompt: str, message_renderer) -> None: + """Execute a single prompt and exit (for -p flag).""" + from code_puppy.messaging import emit_info, emit_system_message + + emit_info(f"[bold blue]Executing prompt:[/bold blue] {prompt}") + + try: + # Get agent through runtime manager and use helper for attachments + agent = get_current_agent() + response = await run_prompt_with_attachments( + agent, + prompt, + spinner_console=message_renderer.console, + ) + if response is None: + return + + agent_response = response.output + emit_system_message( + f"\n[bold purple]AGENT RESPONSE: [/bold purple]\n{agent_response}" + ) + + except asyncio.CancelledError: + from code_puppy.messaging import emit_warning + + emit_warning("Execution cancelled by user") + except Exception as e: + from code_puppy.messaging import emit_error + + emit_error(f"Error executing prompt: {str(e)}") + + +async def prompt_then_interactive_mode(message_renderer) -> None: + """Prompt user for input, execute it, then continue in interactive mode.""" + from code_puppy.messaging import emit_info, emit_system_message + + emit_info("[bold green]🐶 Code Puppy[/bold green] - Enter your request") + emit_system_message( + "After processing your request, you'll continue in interactive mode." 
+ ) + + try: + # Get user input + from code_puppy.command_line.prompt_toolkit_completion import ( + get_input_with_combined_completion, + get_prompt_with_active_model, + ) + from code_puppy.config import COMMAND_HISTORY_FILE + + emit_info("[bold blue]What would you like me to help you with?[/bold blue]") + + try: + # Use prompt_toolkit for enhanced input with path completion + user_prompt = await get_input_with_combined_completion( + get_prompt_with_active_model(), history_file=COMMAND_HISTORY_FILE + ) + except ImportError: + # Fall back to basic input if prompt_toolkit is not available + user_prompt = input(">>> ") + + if user_prompt.strip(): + # Execute the prompt + await execute_single_prompt(user_prompt, message_renderer) + + # Transition to interactive mode + emit_system_message("\n" + "=" * 50) + emit_info("[bold green]🐶 Continuing in Interactive Mode[/bold green]") + emit_system_message( + "Your request and response are preserved in the conversation history." + ) + emit_system_message("=" * 50 + "\n") + + # Continue in interactive mode with the initial command as history + await interactive_mode(message_renderer, initial_command=user_prompt) + else: + # No input provided, just go to interactive mode + await interactive_mode(message_renderer) + + except (KeyboardInterrupt, EOFError): + from code_puppy.messaging import emit_warning + + emit_warning("\nInput cancelled. Starting interactive mode...") + await interactive_mode(message_renderer) + except Exception as e: + from code_puppy.messaging import emit_error + + emit_error(f"Error in prompt mode: {str(e)}") + emit_info("Falling back to interactive mode...") + await interactive_mode(message_renderer) + + def main_entry(): """Entry point for the installed CLI tool.""" - asyncio.run(main()) + try: + asyncio.run(main()) + except KeyboardInterrupt: + # Just exit gracefully with no error message + callbacks.on_shutdown() + return 0 if __name__ == "__main__": - main_entry() + main_entry() \ No newline at end of file diff --git a/code_puppy/mcp_/__init__.py b/code_puppy/mcp_/__init__.py new file mode 100644 index 00000000..f3857200 --- /dev/null +++ b/code_puppy/mcp_/__init__.py @@ -0,0 +1,49 @@ +"""MCP (Model Context Protocol) management system for Code Puppy. + +Note: Be careful not to create circular imports with config_wizard.py. 
+config_wizard.py imports ServerConfig and get_mcp_manager directly from +.manager to avoid circular dependencies with this package __init__.py +""" + +from .circuit_breaker import CircuitBreaker, CircuitOpenError, CircuitState +from .config_wizard import MCPConfigWizard, run_add_wizard +from .dashboard import MCPDashboard +from .error_isolation import ( + ErrorCategory, + ErrorStats, + MCPErrorIsolator, + QuarantinedServerError, + get_error_isolator, +) +from .managed_server import ManagedMCPServer, ServerConfig, ServerState +from .manager import MCPManager, ServerInfo, get_mcp_manager +from .registry import ServerRegistry +from .retry_manager import RetryManager, RetryStats, get_retry_manager, retry_mcp_call +from .status_tracker import Event, ServerStatusTracker + +__all__ = [ + "ManagedMCPServer", + "ServerConfig", + "ServerState", + "ServerStatusTracker", + "Event", + "MCPManager", + "ServerInfo", + "get_mcp_manager", + "ServerRegistry", + "MCPErrorIsolator", + "ErrorStats", + "ErrorCategory", + "QuarantinedServerError", + "get_error_isolator", + "CircuitBreaker", + "CircuitState", + "CircuitOpenError", + "RetryManager", + "RetryStats", + "get_retry_manager", + "retry_mcp_call", + "MCPDashboard", + "MCPConfigWizard", + "run_add_wizard", +] diff --git a/code_puppy/mcp_/async_lifecycle.py b/code_puppy/mcp_/async_lifecycle.py new file mode 100644 index 00000000..161d1841 --- /dev/null +++ b/code_puppy/mcp_/async_lifecycle.py @@ -0,0 +1,239 @@ +""" +Async server lifecycle management using pydantic-ai's context managers. + +This module properly manages MCP server lifecycles by maintaining async contexts +within the same task, allowing servers to start and stay running. +""" + +import asyncio +import logging +from contextlib import AsyncExitStack +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Dict, Optional, Union + +from pydantic_ai.mcp import MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP + +logger = logging.getLogger(__name__) + + +@dataclass +class ManagedServerContext: + """Represents a managed MCP server with its async context.""" + + server_id: str + server: Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP] + exit_stack: AsyncExitStack + start_time: datetime + task: asyncio.Task # The task that manages this server's lifecycle + + +class AsyncServerLifecycleManager: + """ + Manages MCP server lifecycles asynchronously. + + This properly maintains async contexts within the same task, + allowing servers to start and stay running independently of agents. + """ + + def __init__(self): + """Initialize the async lifecycle manager.""" + self._servers: Dict[str, ManagedServerContext] = {} + self._lock = asyncio.Lock() + logger.info("AsyncServerLifecycleManager initialized") + + async def start_server( + self, + server_id: str, + server: Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP], + ) -> bool: + """ + Start an MCP server and maintain its context. + + This creates a dedicated task that enters the server's context + and keeps it alive until explicitly stopped. 
+ + Args: + server_id: Unique identifier for the server + server: The pydantic-ai MCP server instance + + Returns: + True if server started successfully, False otherwise + """ + async with self._lock: + # Check if already running + if server_id in self._servers: + if self._servers[server_id].server.is_running: + logger.info(f"Server {server_id} is already running") + return True + else: + # Server exists but not running, clean it up + logger.warning( + f"Server {server_id} exists but not running, cleaning up" + ) + await self._stop_server_internal(server_id) + + # Create a task that will manage this server's lifecycle + task = asyncio.create_task( + self._server_lifecycle_task(server_id, server), + name=f"mcp_server_{server_id}", + ) + + # Wait briefly for the server to start + await asyncio.sleep(0.1) + + # Check if task failed immediately + if task.done(): + try: + await task + except Exception as e: + logger.error(f"Failed to start server {server_id}: {e}") + return False + + logger.info(f"Server {server_id} starting in background task") + return True + + async def _server_lifecycle_task( + self, + server_id: str, + server: Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP], + ) -> None: + """ + Task that manages a server's lifecycle. + + This task enters the server's context and keeps it alive + until the server is stopped or an error occurs. + """ + exit_stack = AsyncExitStack() + + try: + logger.info(f"Starting server lifecycle for {server_id}") + + # Enter the server's context + await exit_stack.enter_async_context(server) + + # Store the managed context + async with self._lock: + self._servers[server_id] = ManagedServerContext( + server_id=server_id, + server=server, + exit_stack=exit_stack, + start_time=datetime.now(), + task=asyncio.current_task(), + ) + + logger.info(f"Server {server_id} started successfully") + + # Keep the task alive until cancelled + while True: + await asyncio.sleep(1) + + # Check if server is still running + if not server.is_running: + logger.warning(f"Server {server_id} stopped unexpectedly") + break + + except asyncio.CancelledError: + logger.info(f"Server {server_id} lifecycle task cancelled") + raise + except Exception as e: + logger.error(f"Error in server {server_id} lifecycle: {e}") + finally: + # Clean up the context + await exit_stack.aclose() + + # Remove from managed servers + async with self._lock: + if server_id in self._servers: + del self._servers[server_id] + + logger.info(f"Server {server_id} lifecycle ended") + + async def stop_server(self, server_id: str) -> bool: + """ + Stop a running MCP server. + + This cancels the lifecycle task, which properly exits the context. + + Args: + server_id: ID of the server to stop + + Returns: + True if server was stopped, False if not found + """ + async with self._lock: + return await self._stop_server_internal(server_id) + + async def _stop_server_internal(self, server_id: str) -> bool: + """ + Internal method to stop a server (must be called with lock held). + """ + if server_id not in self._servers: + logger.warning(f"Server {server_id} not found") + return False + + context = self._servers[server_id] + + # Cancel the lifecycle task + # This will cause the task to exit and clean up properly + context.task.cancel() + + try: + await context.task + except asyncio.CancelledError: + pass # Expected + + logger.info(f"Stopped server {server_id}") + return True + + def is_running(self, server_id: str) -> bool: + """ + Check if a server is running. 
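+
+        A short illustrative check (hedged; "demo-server" is a placeholder id
+        assumed to have been registered earlier via start_server):
+
+            manager = get_lifecycle_manager()
+            if manager.is_running("demo-server"):
+                info = manager.list_servers()["demo-server"]
+                print(info["uptime_seconds"])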
+ + Args: + server_id: ID of the server + + Returns: + True if server is running, False otherwise + """ + context = self._servers.get(server_id) + return context.server.is_running if context else False + + def list_servers(self) -> Dict[str, Dict[str, Any]]: + """ + List all running servers. + + Returns: + Dictionary of server IDs to server info + """ + servers = {} + for server_id, context in self._servers.items(): + uptime = (datetime.now() - context.start_time).total_seconds() + servers[server_id] = { + "type": context.server.__class__.__name__, + "is_running": context.server.is_running, + "uptime_seconds": uptime, + "start_time": context.start_time.isoformat(), + } + return servers + + async def stop_all(self) -> None: + """Stop all running servers.""" + server_ids = list(self._servers.keys()) + + for server_id in server_ids: + await self.stop_server(server_id) + + logger.info("All MCP servers stopped") + + +# Global singleton instance +_lifecycle_manager: Optional[AsyncServerLifecycleManager] = None + + +def get_lifecycle_manager() -> AsyncServerLifecycleManager: + """Get the global lifecycle manager instance.""" + global _lifecycle_manager + if _lifecycle_manager is None: + _lifecycle_manager = AsyncServerLifecycleManager() + return _lifecycle_manager diff --git a/code_puppy/mcp_/blocking_startup.py b/code_puppy/mcp_/blocking_startup.py new file mode 100644 index 00000000..c897d2e9 --- /dev/null +++ b/code_puppy/mcp_/blocking_startup.py @@ -0,0 +1,416 @@ +""" +MCP Server with blocking startup capability and stderr capture. + +This module provides MCP servers that: +1. Capture stderr output from stdio servers +2. Block until fully initialized before allowing operations +3. Emit stderr to users via emit_info with message groups +""" + +import asyncio +import os +import tempfile +import threading +import uuid +from contextlib import asynccontextmanager +from typing import List, Optional + +from mcp.client.stdio import StdioServerParameters, stdio_client +from pydantic_ai.mcp import MCPServerStdio + +from code_puppy.messaging import emit_info + + +class StderrFileCapture: + """Captures stderr to a file and monitors it in a background thread.""" + + def __init__( + self, + server_name: str, + emit_to_user: bool = True, + message_group: Optional[uuid.UUID] = None, + ): + self.server_name = server_name + self.emit_to_user = emit_to_user + self.message_group = message_group or uuid.uuid4() + self.temp_file = None + self.temp_path = None + self.monitor_thread = None + self.stop_monitoring = threading.Event() + self.captured_lines = [] + + def start(self): + """Start capture by creating temp file and monitor thread.""" + # Create temp file + self.temp_file = tempfile.NamedTemporaryFile( + mode="w+", delete=False, suffix=".err" + ) + self.temp_path = self.temp_file.name + + # Start monitoring thread + self.stop_monitoring.clear() + self.monitor_thread = threading.Thread(target=self._monitor_file) + self.monitor_thread.daemon = True + self.monitor_thread.start() + + return self.temp_file + + def _monitor_file(self): + """Monitor the temp file for new content.""" + if not self.temp_path: + return + + last_pos = 0 + while not self.stop_monitoring.is_set(): + try: + with open(self.temp_path, "r") as f: + f.seek(last_pos) + new_content = f.read() + if new_content: + last_pos = f.tell() + # Process new lines + for line in new_content.splitlines(): + if line.strip(): + self.captured_lines.append(line) + if self.emit_to_user: + emit_info( + f"[bold white on blue] MCP {self.server_name} [/bold white on 
blue] {line}", + style="dim cyan", + message_group=self.message_group, + ) + + except Exception: + pass # File might not exist yet or be deleted + + self.stop_monitoring.wait(0.1) # Check every 100ms + + def stop(self): + """Stop monitoring and clean up.""" + self.stop_monitoring.set() + if self.monitor_thread: + self.monitor_thread.join(timeout=1) + + if self.temp_file: + try: + self.temp_file.close() + except Exception: + pass + + if self.temp_path and os.path.exists(self.temp_path): + try: + # Read any remaining content + with open(self.temp_path, "r") as f: + content = f.read() + for line in content.splitlines(): + if line.strip() and line not in self.captured_lines: + self.captured_lines.append(line) + if self.emit_to_user: + emit_info( + f"[bold white on blue] MCP {self.server_name} [/bold white on blue] {line}", + style="dim cyan", + message_group=self.message_group, + ) + + os.unlink(self.temp_path) + except Exception: + pass + + def get_captured_lines(self) -> List[str]: + """Get all captured lines.""" + return self.captured_lines.copy() + + +class SimpleCapturedMCPServerStdio(MCPServerStdio): + """ + MCPServerStdio that captures stderr to a file and optionally emits to user. + """ + + def __init__( + self, + command: str, + args=(), + env=None, + cwd=None, + emit_stderr: bool = True, + message_group: Optional[uuid.UUID] = None, + **kwargs, + ): + super().__init__(command=command, args=args, env=env, cwd=cwd, **kwargs) + self.emit_stderr = emit_stderr + self.message_group = message_group or uuid.uuid4() + self._stderr_capture = None + + @asynccontextmanager + async def client_streams(self): + """Create streams with stderr capture.""" + server = StdioServerParameters( + command=self.command, args=list(self.args), env=self.env, cwd=self.cwd + ) + + # Create stderr capture + server_name = getattr(self, "tool_prefix", self.command) + self._stderr_capture = StderrFileCapture( + server_name, self.emit_stderr, self.message_group + ) + stderr_file = self._stderr_capture.start() + + try: + async with stdio_client(server=server, errlog=stderr_file) as ( + read_stream, + write_stream, + ): + yield read_stream, write_stream + finally: + self._stderr_capture.stop() + + def get_captured_stderr(self) -> List[str]: + """Get captured stderr lines.""" + if self._stderr_capture: + return self._stderr_capture.get_captured_lines() + return [] + + +class BlockingMCPServerStdio(SimpleCapturedMCPServerStdio): + """ + MCP Server that blocks until fully initialized. + + This server ensures that initialization is complete before + allowing any operations, preventing race conditions. 
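+
+    A minimal usage sketch (hedged; the command and script name are
+    placeholders, and the surrounding code is assumed to run inside an
+    asyncio event loop):
+
+        server = BlockingMCPServerStdio(command="python", args=["mcp_server.py"])
+        async with server:
+            await server.ensure_ready(timeout=30.0)  # raises on timeout or failure
+            # ... safe to issue MCP calls from here ...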
+ """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._initialized = asyncio.Event() + self._init_error: Optional[Exception] = None + self._initialization_task = None + + async def __aenter__(self): + """Enter context and track initialization.""" + try: + # Start initialization + result = await super().__aenter__() + + # Mark as initialized + self._initialized.set() + + # Emit success message + server_name = getattr(self, "tool_prefix", self.command) + emit_info( + f"✅ MCP Server '{server_name}' initialized successfully", + style="green", + message_group=self.message_group, + ) + + return result + + except Exception as e: + # Store error and mark as initialized (with error) + self._init_error = e + self._initialized.set() + + # Emit error message + server_name = getattr(self, "tool_prefix", self.command) + emit_info( + f"❌ MCP Server '{server_name}' failed to initialize: {e}", + style="red", + message_group=self.message_group, + ) + + raise + + async def wait_until_ready(self, timeout: float = 30.0) -> bool: + """ + Wait until the server is ready. + + Args: + timeout: Maximum time to wait in seconds + + Returns: + True if server is ready, False if timeout or error + + Raises: + TimeoutError: If server doesn't initialize within timeout + Exception: If server initialization failed + """ + try: + await asyncio.wait_for(self._initialized.wait(), timeout=timeout) + + # Check if there was an initialization error + if self._init_error: + raise self._init_error + + return True + + except asyncio.TimeoutError: + server_name = getattr(self, "tool_prefix", self.command) + raise TimeoutError( + f"Server '{server_name}' initialization timeout after {timeout}s" + ) + + async def ensure_ready(self, timeout: float = 30.0): + """ + Ensure server is ready before proceeding. + + This is a convenience method that raises if not ready. + + Args: + timeout: Maximum time to wait in seconds + + Raises: + TimeoutError: If server doesn't initialize within timeout + Exception: If server initialization failed + """ + await self.wait_until_ready(timeout) + + def is_ready(self) -> bool: + """ + Check if server is ready without blocking. + + Returns: + True if server is initialized and ready + """ + return self._initialized.is_set() and self._init_error is None + + +class StartupMonitor: + """ + Monitor for tracking multiple server startups. + + This class helps coordinate startup of multiple MCP servers + and ensures all are ready before proceeding. + """ + + def __init__(self, message_group: Optional[uuid.UUID] = None): + self.servers = {} + self.startup_times = {} + self.message_group = message_group or uuid.uuid4() + + def add_server(self, name: str, server: BlockingMCPServerStdio): + """Add a server to monitor.""" + self.servers[name] = server + + async def wait_all_ready(self, timeout: float = 30.0) -> dict: + """ + Wait for all servers to be ready. 
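+
+        Illustrative sketch (hedged; assumes each server's async context has
+        been entered elsewhere, e.g. by start_servers_with_blocking, so its
+        ready event can fire; ``files_server`` is a placeholder instance):
+
+            monitor = StartupMonitor()
+            monitor.add_server("files", files_server)
+            results = await monitor.wait_all_ready(timeout=30.0)
+            if not all(results.values()):
+                print(monitor.get_startup_report())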
+ + Args: + timeout: Maximum time to wait for all servers + + Returns: + Dictionary of server names to ready status + """ + import time + + results = {} + + # Create tasks for all servers + async def wait_server(name: str, server: BlockingMCPServerStdio): + start = time.time() + try: + await server.wait_until_ready(timeout) + self.startup_times[name] = time.time() - start + results[name] = True + emit_info( + f" {name}: Ready in {self.startup_times[name]:.2f}s", + style="dim green", + message_group=self.message_group, + ) + except Exception as e: + self.startup_times[name] = time.time() - start + results[name] = False + emit_info( + f" {name}: Failed after {self.startup_times[name]:.2f}s - {e}", + style="dim red", + message_group=self.message_group, + ) + + # Wait for all servers in parallel + emit_info( + f"⏳ Waiting for {len(self.servers)} MCP servers to initialize...", + style="cyan", + message_group=self.message_group, + ) + + tasks = [ + asyncio.create_task(wait_server(name, server)) + for name, server in self.servers.items() + ] + + await asyncio.gather(*tasks, return_exceptions=True) + + # Report summary + ready_count = sum(1 for r in results.values() if r) + total_count = len(results) + + if ready_count == total_count: + emit_info( + f"✅ All {total_count} servers ready!", + style="green bold", + message_group=self.message_group, + ) + else: + emit_info( + f"⚠️ {ready_count}/{total_count} servers ready", + style="yellow", + message_group=self.message_group, + ) + + return results + + def get_startup_report(self) -> str: + """Get a report of startup times.""" + lines = ["Server Startup Times:"] + for name, time_taken in self.startup_times.items(): + status = "✅" if self.servers[name].is_ready() else "❌" + lines.append(f" {status} {name}: {time_taken:.2f}s") + return "\n".join(lines) + + +async def start_servers_with_blocking( + *servers: BlockingMCPServerStdio, + timeout: float = 30.0, + message_group: Optional[uuid.UUID] = None, +): + """ + Start multiple servers and wait for all to be ready. + + Args: + *servers: Variable number of BlockingMCPServerStdio instances + timeout: Maximum time to wait for all servers + message_group: Optional UUID for grouping log messages + + Returns: + List of ready servers + + Example: + server1 = BlockingMCPServerStdio(...) + server2 = BlockingMCPServerStdio(...) + ready = await start_servers_with_blocking(server1, server2) + """ + monitor = StartupMonitor(message_group=message_group) + + for i, server in enumerate(servers): + name = getattr(server, "tool_prefix", f"server-{i}") + monitor.add_server(name, server) + + # Start all servers + async def start_server(server): + async with server: + await asyncio.sleep(0.1) # Keep context alive briefly + return server + + # Start servers in parallel + [asyncio.create_task(start_server(server)) for server in servers] + + # Wait for all to be ready + results = await monitor.wait_all_ready(timeout) + + # Get the report + emit_info(monitor.get_startup_report(), message_group=monitor.message_group) + + # Return ready servers + ready_servers = [ + server for name, server in monitor.servers.items() if results.get(name, False) + ] + + return ready_servers diff --git a/code_puppy/mcp_/captured_stdio_server.py b/code_puppy/mcp_/captured_stdio_server.py new file mode 100644 index 00000000..db52e238 --- /dev/null +++ b/code_puppy/mcp_/captured_stdio_server.py @@ -0,0 +1,275 @@ +""" +Custom MCPServerStdio that captures stderr output properly. 
+ +This module provides a version of MCPServerStdio that captures subprocess +stderr output and makes it available through proper logging channels. +""" + +import asyncio +import logging +import os +from contextlib import asynccontextmanager +from typing import AsyncIterator, Optional, Sequence + +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from mcp.client.stdio import StdioServerParameters, stdio_client +from mcp.shared.session import SessionMessage +from pydantic_ai.mcp import MCPServerStdio + +logger = logging.getLogger(__name__) + + +class StderrCapture: + """ + Captures stderr output using a pipe and background reader. + """ + + def __init__(self, name: str, handler: Optional[callable] = None): + """ + Initialize stderr capture. + + Args: + name: Name for this capture stream + handler: Optional function to call with captured lines + """ + self.name = name + self.handler = handler or self._default_handler + self._captured_lines = [] + self._reader_task = None + self._pipe_r = None + self._pipe_w = None + + def _default_handler(self, line: str): + """Default handler that logs to Python logging.""" + if line.strip(): + logger.debug(f"[MCP {self.name}] {line.rstrip()}") + + async def start_capture(self): + """Start capturing stderr by creating a pipe and reader task.""" + # Create a pipe for capturing stderr + self._pipe_r, self._pipe_w = os.pipe() + + # Make the read end non-blocking + os.set_blocking(self._pipe_r, False) + + # Start background task to read from pipe + self._reader_task = asyncio.create_task(self._read_pipe()) + + # Return the write end as the file descriptor for stderr + return self._pipe_w + + async def _read_pipe(self): + """Background task to read from the pipe.""" + loop = asyncio.get_event_loop() + buffer = b"" + + try: + while True: + # Use asyncio's add_reader for efficient async reading + future = asyncio.Future() + + def read_callback(): + try: + data = os.read(self._pipe_r, 4096) + future.set_result(data) + except BlockingIOError: + future.set_result(b"") + except Exception as e: + future.set_exception(e) + + loop.add_reader(self._pipe_r, read_callback) + try: + data = await future + finally: + loop.remove_reader(self._pipe_r) + + if not data: + await asyncio.sleep(0.1) + continue + + # Process the data + buffer += data + + # Look for complete lines + while b"\n" in buffer: + line, buffer = buffer.split(b"\n", 1) + line_str = line.decode("utf-8", errors="replace") + if line_str: + self._captured_lines.append(line_str) + self.handler(line_str) + + except asyncio.CancelledError: + # Process any remaining buffer + if buffer: + line_str = buffer.decode("utf-8", errors="replace") + if line_str: + self._captured_lines.append(line_str) + self.handler(line_str) + raise + + async def stop_capture(self): + """Stop capturing and clean up.""" + if self._reader_task: + self._reader_task.cancel() + try: + await self._reader_task + except asyncio.CancelledError: + pass + + if self._pipe_r is not None: + os.close(self._pipe_r) + if self._pipe_w is not None: + os.close(self._pipe_w) + + def get_captured_lines(self) -> list[str]: + """Get all captured lines.""" + return self._captured_lines.copy() + + +class CapturedMCPServerStdio(MCPServerStdio): + """ + Extended MCPServerStdio that captures and handles stderr output. + + This class captures stderr from the subprocess and makes it available + through proper logging channels instead of letting it pollute the console. 
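+
+    Construction sketch (hedged; the command, args, and handler below are
+    placeholders; captured lines, if any, are passed to the handler and can
+    also be read back via get_captured_stderr()):
+
+        def on_stderr(line: str) -> None:
+            logging.getLogger("mcp").debug(line)
+
+        server = CapturedMCPServerStdio(
+            command="python",
+            args=["mcp_server.py"],
+            stderr_handler=on_stderr,
+        )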
+ """ + + def __init__( + self, + command: str, + args: Sequence[str] = (), + env: dict[str, str] | None = None, + cwd: str | None = None, + stderr_handler: Optional[callable] = None, + **kwargs, + ): + """ + Initialize captured stdio server. + + Args: + command: The command to run + args: Arguments for the command + env: Environment variables + cwd: Working directory + stderr_handler: Optional function to handle stderr lines + **kwargs: Additional arguments for MCPServerStdio + """ + super().__init__(command=command, args=args, env=env, cwd=cwd, **kwargs) + self.stderr_handler = stderr_handler + self._stderr_capture = None + self._captured_lines = [] + + @asynccontextmanager + async def client_streams( + self, + ) -> AsyncIterator[ + tuple[ + MemoryObjectReceiveStream[SessionMessage | Exception], + MemoryObjectSendStream[SessionMessage], + ] + ]: + """Create the streams for the MCP server with stderr capture.""" + server = StdioServerParameters( + command=self.command, args=list(self.args), env=self.env, cwd=self.cwd + ) + + # Create stderr capture + def stderr_line_handler(line: str): + """Handle captured stderr lines.""" + self._captured_lines.append(line) + + if self.stderr_handler: + self.stderr_handler(line) + else: + # Default: log at DEBUG level to avoid console spam + logger.debug(f"[MCP Server {self.command}] {line}") + + self._stderr_capture = StderrCapture(self.command, stderr_line_handler) + + # For now, use devnull for stderr to suppress output + # We'll capture it through other means if needed + with open(os.devnull, "w") as devnull: + async with stdio_client(server=server, errlog=devnull) as ( + read_stream, + write_stream, + ): + yield read_stream, write_stream + + def get_captured_stderr(self) -> list[str]: + """ + Get all captured stderr lines. + + Returns: + List of captured stderr lines + """ + return self._captured_lines.copy() + + def clear_captured_stderr(self): + """Clear the captured stderr buffer.""" + self._captured_lines.clear() + + +class StderrCollector: + """ + A centralized collector for stderr from multiple MCP servers. + + This can be used to aggregate stderr from all MCP servers in one place. + """ + + def __init__(self): + """Initialize the collector.""" + self.servers = {} + self.all_lines = [] + + def create_handler(self, server_name: str, emit_to_user: bool = False): + """ + Create a handler function for a specific server. 
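+
+        Illustrative sketch (hedged; the server name is arbitrary and the
+        handler is normally invoked by the capture layer, not by hand):
+
+            collector = StderrCollector()
+            handler = collector.create_handler("filesystem", emit_to_user=False)
+            handler("server booted")
+            assert collector.get_server_output("filesystem") == ["server booted"]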
+ + Args: + server_name: Name to identify this server + emit_to_user: If True, emit stderr lines to user via emit_info + + Returns: + Handler function that can be passed to CapturedMCPServerStdio + """ + + def handler(line: str): + # Store with server identification + import time + + entry = {"server": server_name, "line": line, "timestamp": time.time()} + + if server_name not in self.servers: + self.servers[server_name] = [] + + self.servers[server_name].append(line) + self.all_lines.append(entry) + + # Emit to user if requested + if emit_to_user: + from code_puppy.messaging import emit_info + + emit_info(f"[MCP {server_name}] {line}", style="dim cyan") + + return handler + + def get_server_output(self, server_name: str) -> list[str]: + """Get all output from a specific server.""" + return self.servers.get(server_name, []).copy() + + def get_all_output(self) -> list[dict]: + """Get all output from all servers with metadata.""" + return self.all_lines.copy() + + def clear(self, server_name: Optional[str] = None): + """Clear captured output.""" + if server_name: + if server_name in self.servers: + self.servers[server_name].clear() + # Also clear from all_lines + self.all_lines = [ + entry for entry in self.all_lines if entry["server"] != server_name + ] + else: + self.servers.clear() + self.all_lines.clear() diff --git a/code_puppy/mcp_/circuit_breaker.py b/code_puppy/mcp_/circuit_breaker.py new file mode 100644 index 00000000..5685b171 --- /dev/null +++ b/code_puppy/mcp_/circuit_breaker.py @@ -0,0 +1,234 @@ +""" +Circuit breaker implementation for MCP servers to prevent cascading failures. + +This module implements the circuit breaker pattern to protect against cascading +failures when MCP servers become unhealthy. The circuit breaker has three states: +- CLOSED: Normal operation, calls pass through +- OPEN: Calls are blocked and fail fast +- HALF_OPEN: Limited calls allowed to test recovery +""" + +import asyncio +import logging +import time +from enum import Enum +from typing import Any, Callable + +logger = logging.getLogger(__name__) + + +class CircuitState(Enum): + """Circuit breaker states.""" + + CLOSED = "closed" # Normal operation + OPEN = "open" # Blocking calls + HALF_OPEN = "half_open" # Testing recovery + + +class CircuitOpenError(Exception): + """Raised when circuit breaker is in OPEN state.""" + + pass + + +class CircuitBreaker: + """ + Circuit breaker to prevent cascading failures in MCP servers. + + The circuit breaker monitors the success/failure rate of operations and + transitions between states to protect the system from unhealthy dependencies. + + States: + - CLOSED: Normal operation, all calls allowed + - OPEN: Circuit is open, all calls fail fast with CircuitOpenError + - HALF_OPEN: Testing recovery, limited calls allowed + + State Transitions: + - CLOSED → OPEN: After failure_threshold consecutive failures + - OPEN → HALF_OPEN: After timeout seconds + - HALF_OPEN → CLOSED: After success_threshold consecutive successes + - HALF_OPEN → OPEN: After any failure + """ + + def __init__( + self, failure_threshold: int = 5, success_threshold: int = 2, timeout: int = 60 + ): + """ + Initialize circuit breaker. 
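+
+        Usage sketch (hedged; ``fetch_tools`` stands in for any coroutine
+        function that talks to an MCP server):
+
+            breaker = CircuitBreaker(failure_threshold=3, timeout=30)
+            try:
+                result = await breaker.call(fetch_tools)
+            except CircuitOpenError:
+                result = None  # fail fast while the server is considered unhealthy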
+ + Args: + failure_threshold: Number of consecutive failures before opening circuit + success_threshold: Number of consecutive successes needed to close circuit from half-open + timeout: Seconds to wait before transitioning from OPEN to HALF_OPEN + """ + self.failure_threshold = failure_threshold + self.success_threshold = success_threshold + self.timeout = timeout + + self._state = CircuitState.CLOSED + self._failure_count = 0 + self._success_count = 0 + self._last_failure_time = None + self._lock = asyncio.Lock() + + logger.info( + f"Circuit breaker initialized: failure_threshold={failure_threshold}, " + f"success_threshold={success_threshold}, timeout={timeout}s" + ) + + async def call(self, func: Callable, *args, **kwargs) -> Any: + """ + Execute a function through the circuit breaker. + + Args: + func: Function to execute + *args: Positional arguments for the function + **kwargs: Keyword arguments for the function + + Returns: + Result of the function call + + Raises: + CircuitOpenError: If circuit is in OPEN state + Exception: Any exception raised by the wrapped function + """ + async with self._lock: + current_state = self._get_current_state() + + if current_state == CircuitState.OPEN: + logger.warning("Circuit breaker is OPEN, failing fast") + raise CircuitOpenError("Circuit breaker is open") + + if current_state == CircuitState.HALF_OPEN: + # In half-open state, we're testing recovery + logger.info("Circuit breaker is HALF_OPEN, allowing test call") + + # Execute the function outside the lock to avoid blocking other calls + try: + result = ( + await func(*args, **kwargs) + if asyncio.iscoroutinefunction(func) + else func(*args, **kwargs) + ) + await self._on_success() + return result + except Exception as e: + await self._on_failure() + raise e + + def record_success(self) -> None: + """Record a successful operation.""" + asyncio.create_task(self._on_success()) + + def record_failure(self) -> None: + """Record a failed operation.""" + asyncio.create_task(self._on_failure()) + + def get_state(self) -> CircuitState: + """Get current circuit breaker state.""" + return self._get_current_state() + + def is_open(self) -> bool: + """Check if circuit breaker is in OPEN state.""" + return self._get_current_state() == CircuitState.OPEN + + def is_half_open(self) -> bool: + """Check if circuit breaker is in HALF_OPEN state.""" + return self._get_current_state() == CircuitState.HALF_OPEN + + def is_closed(self) -> bool: + """Check if circuit breaker is in CLOSED state.""" + return self._get_current_state() == CircuitState.CLOSED + + def reset(self) -> None: + """Reset circuit breaker to CLOSED state and clear counters.""" + logger.info("Resetting circuit breaker to CLOSED state") + self._state = CircuitState.CLOSED + self._failure_count = 0 + self._success_count = 0 + self._last_failure_time = None + + def force_open(self) -> None: + """Force circuit breaker to OPEN state.""" + logger.warning("Forcing circuit breaker to OPEN state") + self._state = CircuitState.OPEN + self._last_failure_time = time.time() + + def force_close(self) -> None: + """Force circuit breaker to CLOSED state and reset counters.""" + logger.info("Forcing circuit breaker to CLOSED state") + self._state = CircuitState.CLOSED + self._failure_count = 0 + self._success_count = 0 + self._last_failure_time = None + + def _get_current_state(self) -> CircuitState: + """ + Get the current state, handling automatic transitions. 
+ + This method handles the automatic transition from OPEN to HALF_OPEN + after the timeout period has elapsed. + """ + if self._state == CircuitState.OPEN and self._should_attempt_reset(): + logger.info("Timeout reached, transitioning from OPEN to HALF_OPEN") + self._state = CircuitState.HALF_OPEN + self._success_count = 0 # Reset success counter for half-open testing + + return self._state + + def _should_attempt_reset(self) -> bool: + """Check if enough time has passed to attempt reset from OPEN to HALF_OPEN.""" + if self._last_failure_time is None: + return False + + return time.time() - self._last_failure_time >= self.timeout + + async def _on_success(self) -> None: + """Handle successful operation.""" + async with self._lock: + current_state = self._get_current_state() + + if current_state == CircuitState.CLOSED: + # Reset failure count on success in closed state + if self._failure_count > 0: + logger.debug("Resetting failure count after success") + self._failure_count = 0 + + elif current_state == CircuitState.HALF_OPEN: + self._success_count += 1 + logger.debug( + f"Success in HALF_OPEN state: {self._success_count}/{self.success_threshold}" + ) + + if self._success_count >= self.success_threshold: + logger.info( + "Success threshold reached, transitioning from HALF_OPEN to CLOSED" + ) + self._state = CircuitState.CLOSED + self._failure_count = 0 + self._success_count = 0 + self._last_failure_time = None + + async def _on_failure(self) -> None: + """Handle failed operation.""" + async with self._lock: + current_state = self._get_current_state() + + if current_state == CircuitState.CLOSED: + self._failure_count += 1 + logger.debug( + f"Failure in CLOSED state: {self._failure_count}/{self.failure_threshold}" + ) + + if self._failure_count >= self.failure_threshold: + logger.warning( + "Failure threshold reached, transitioning from CLOSED to OPEN" + ) + self._state = CircuitState.OPEN + self._last_failure_time = time.time() + + elif current_state == CircuitState.HALF_OPEN: + logger.warning("Failure in HALF_OPEN state, transitioning back to OPEN") + self._state = CircuitState.OPEN + self._success_count = 0 + self._last_failure_time = time.time() diff --git a/code_puppy/mcp_/config_wizard.py b/code_puppy/mcp_/config_wizard.py new file mode 100644 index 00000000..60f851b9 --- /dev/null +++ b/code_puppy/mcp_/config_wizard.py @@ -0,0 +1,504 @@ +""" +MCP Configuration Wizard - Interactive setup for MCP servers. 
+ +Note: This module imports ServerConfig and get_mcp_manager directly from +.code_puppy.mcp.manager to avoid circular imports with the package __init__.py +""" + +import re +from typing import Dict, Optional +from urllib.parse import urlparse + +from rich.console import Console + +from code_puppy.mcp_.manager import ServerConfig, get_mcp_manager +from code_puppy.messaging import ( + emit_error, + emit_info, + emit_prompt, + emit_success, + emit_warning, +) + +console = Console() + + +def prompt_ask( + prompt_text: str, default: Optional[str] = None, choices: Optional[list] = None +) -> Optional[str]: + """Helper function to replace rich.prompt.Prompt.ask with emit_prompt.""" + try: + if default: + full_prompt = f"{prompt_text} [{default}]" + else: + full_prompt = prompt_text + + if choices: + full_prompt += f" ({'/'.join(choices)})" + + response = emit_prompt(full_prompt + ": ") + + # Handle default value + if not response.strip() and default: + return default + + # Handle choices validation + if choices and response.strip() and response.strip() not in choices: + emit_error(f"Invalid choice. Must be one of: {', '.join(choices)}") + return None + + return response.strip() if response.strip() else None + except Exception as e: + emit_error(f"Input error: {e}") + return None + + +def confirm_ask(prompt_text: str, default: bool = True) -> bool: + """Helper function to replace rich.prompt.Confirm.ask with emit_prompt.""" + try: + default_text = "[Y/n]" if default else "[y/N]" + response = emit_prompt(f"{prompt_text} {default_text}: ") + + if not response.strip(): + return default + + response_lower = response.strip().lower() + if response_lower in ["y", "yes", "true", "1"]: + return True + elif response_lower in ["n", "no", "false", "0"]: + return False + else: + return default + except Exception as e: + emit_error(f"Input error: {e}") + return default + + +class MCPConfigWizard: + """Interactive wizard for configuring MCP servers.""" + + def __init__(self): + self.manager = get_mcp_manager() + + def run_wizard(self, group_id: str = None) -> Optional[ServerConfig]: + """ + Run the interactive configuration wizard. 
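+
+        Usage sketch (hedged; the wizard prompts interactively on the
+        console, so it is typically driven from a meta-command rather than a
+        script):
+
+            wizard = MCPConfigWizard()
+            server_config = wizard.run_wizard()
+            if server_config is not None:
+                print(f"Configured MCP server: {server_config.name}")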
+ + Args: + group_id: Optional message group ID for grouping related messages + + Returns: + ServerConfig if successful, None if cancelled + """ + if group_id is None: + import uuid + + group_id = str(uuid.uuid4()) + + emit_info("🧙 MCP Server Configuration Wizard", message_group=group_id) + + # Step 1: Server name + name = self.prompt_server_name(group_id) + if not name: + return None + + # Step 2: Server type + server_type = self.prompt_server_type(group_id) + if not server_type: + return None + + # Step 3: Type-specific configuration + config = {} + if server_type == "sse": + config = self.prompt_sse_config(group_id) + elif server_type == "http": + config = self.prompt_http_config(group_id) + elif server_type == "stdio": + config = self.prompt_stdio_config(group_id) + + if not config: + return None + + # Step 4: Create ServerConfig + server_config = ServerConfig( + id=f"{name}_{hash(name)}", + name=name, + type=server_type, + enabled=True, + config=config, + ) + + # Step 5: Show summary and confirm + if self.prompt_confirmation(server_config, group_id): + return server_config + + return None + + def prompt_server_name(self, group_id: str = None) -> Optional[str]: + """Prompt for server name with validation.""" + while True: + name = prompt_ask("Enter server name", default=None) + + if not name: + if not confirm_ask("Cancel configuration?", default=False): + continue + return None + + # Validate name + if not self.validate_name(name): + emit_error( + "Name must be alphanumeric with hyphens/underscores only", + message_group=group_id, + ) + continue + + # Check uniqueness + existing = self.manager.registry.get_by_name(name) + if existing: + emit_error(f"Server '{name}' already exists", message_group=group_id) + continue + + return name + + def prompt_server_type(self, group_id: str = None) -> Optional[str]: + """Prompt for server type.""" + emit_info("\nServer types:", message_group=group_id) + emit_info( + " sse - Server-Sent Events (HTTP streaming)", message_group=group_id + ) + emit_info(" http - HTTP/REST API", message_group=group_id) + emit_info(" stdio - Local command (subprocess)", message_group=group_id) + + while True: + server_type = prompt_ask( + "Select server type", choices=["sse", "http", "stdio"], default="stdio" + ) + + if server_type in ["sse", "http", "stdio"]: + return server_type + + emit_error( + "Invalid type. 
Choose: sse, http, or stdio", message_group=group_id + ) + + def prompt_sse_config(self, group_id: str = None) -> Optional[Dict]: + """Prompt for SSE server configuration.""" + emit_info("Configuring SSE server", message_group=group_id) + + # URL + url = self.prompt_url("SSE", group_id) + if not url: + return None + + config = {"type": "sse", "url": url, "timeout": 30} + + # Headers (optional) + if confirm_ask("Add custom headers?", default=False): + headers = self.prompt_headers(group_id) + if headers: + config["headers"] = headers + + # Timeout + timeout_str = prompt_ask("Connection timeout (seconds)", default="30") + try: + config["timeout"] = int(timeout_str) + except ValueError: + config["timeout"] = 30 + + return config + + def prompt_http_config(self, group_id: str = None) -> Optional[Dict]: + """Prompt for HTTP server configuration.""" + emit_info("Configuring HTTP server", message_group=group_id) + + # URL + url = self.prompt_url("HTTP", group_id) + if not url: + return None + + config = {"type": "http", "url": url, "timeout": 30} + + # Headers (optional) + if confirm_ask("Add custom headers?", default=False): + headers = self.prompt_headers(group_id) + if headers: + config["headers"] = headers + + # Timeout + timeout_str = prompt_ask("Request timeout (seconds)", default="30") + try: + config["timeout"] = int(timeout_str) + except ValueError: + config["timeout"] = 30 + + return config + + def prompt_stdio_config(self, group_id: str = None) -> Optional[Dict]: + """Prompt for Stdio server configuration.""" + emit_info("Configuring Stdio server", message_group=group_id) + emit_info("Examples:", message_group=group_id) + emit_info( + " • npx -y @modelcontextprotocol/server-filesystem /path", + message_group=group_id, + ) + emit_info(" • python mcp_server.py", message_group=group_id) + emit_info(" • node server.js", message_group=group_id) + + # Command + command = prompt_ask("Enter command", default=None) + + if not command: + return None + + config = {"type": "stdio", "command": command, "args": [], "timeout": 30} + + # Arguments + args_str = prompt_ask("Enter arguments (space-separated)", default="") + if args_str: + # Simple argument parsing (handles quoted strings) + import shlex + + try: + config["args"] = shlex.split(args_str) + except ValueError: + config["args"] = args_str.split() + + # Working directory (optional) + cwd = prompt_ask("Working directory (optional)", default="") + if cwd: + import os + + if os.path.isdir(os.path.expanduser(cwd)): + config["cwd"] = os.path.expanduser(cwd) + else: + emit_warning( + f"Directory '{cwd}' not found, ignoring", message_group=group_id + ) + + # Environment variables (optional) + if confirm_ask("Add environment variables?", default=False): + env = self.prompt_env_vars(group_id) + if env: + config["env"] = env + + # Timeout + timeout_str = prompt_ask("Startup timeout (seconds)", default="30") + try: + config["timeout"] = int(timeout_str) + except ValueError: + config["timeout"] = 30 + + return config + + def prompt_url(self, server_type: str, group_id: str = None) -> Optional[str]: + """Prompt for and validate URL.""" + while True: + url = prompt_ask(f"Enter {server_type} server URL", default=None) + + if not url: + if confirm_ask("Cancel configuration?", default=False): + return None + continue + + if self.validate_url(url): + return url + + emit_error( + "Invalid URL. 
Must be http:// or https://", message_group=group_id + ) + + def prompt_headers(self, group_id: str = None) -> Dict[str, str]: + """Prompt for HTTP headers.""" + headers = {} + emit_info("Enter headers (format: Name: Value)", message_group=group_id) + emit_info("Press Enter with empty name to finish", message_group=group_id) + + while True: + name = prompt_ask("Header name", default="") + if not name: + break + + value = prompt_ask(f"Value for '{name}'", default="") + headers[name] = value + + if not confirm_ask("Add another header?", default=True): + break + + return headers + + def prompt_env_vars(self, group_id: str = None) -> Dict[str, str]: + """Prompt for environment variables.""" + env = {} + emit_info("Enter environment variables", message_group=group_id) + emit_info("Press Enter with empty name to finish", message_group=group_id) + + while True: + name = prompt_ask("Variable name", default="") + if not name: + break + + value = prompt_ask(f"Value for '{name}'", default="") + env[name] = value + + if not confirm_ask("Add another variable?", default=True): + break + + return env + + def validate_name(self, name: str) -> bool: + """Validate server name.""" + # Allow alphanumeric, hyphens, and underscores + return bool(re.match(r"^[a-zA-Z0-9_-]+$", name)) + + def validate_url(self, url: str) -> bool: + """Validate URL format.""" + try: + result = urlparse(url) + return result.scheme in ("http", "https") and bool(result.netloc) + except Exception: + return False + + def validate_command(self, command: str) -> bool: + """Check if command exists (basic check).""" + import os + import shutil + + # If it's a path, check if file exists + if "/" in command or "\\" in command: + return os.path.isfile(command) + + # Otherwise check if it's in PATH + return shutil.which(command) is not None + + def test_connection(self, config: ServerConfig, group_id: str = None) -> bool: + """ + Test connection to the configured server. 
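+
+        Note: in the current implementation this validates that a server
+        instance can be constructed from the configuration (registering it
+        temporarily if needed); it does not perform a network round-trip.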
+ + Args: + config: Server configuration to test + + Returns: + True if connection successful, False otherwise + """ + emit_info("Testing connection...", message_group=group_id) + + try: + # Try to create the server instance + managed = self.manager.get_server(config.id) + if not managed: + # Temporarily register to test + self.manager.register_server(config) + managed = self.manager.get_server(config.id) + + if managed: + # Try to get the pydantic server (this validates config) + server = managed.get_pydantic_server() + if server: + emit_success("✓ Configuration valid", message_group=group_id) + return True + + emit_error("✗ Failed to create server instance", message_group=group_id) + return False + + except Exception as e: + emit_error(f"✗ Configuration error: {e}", message_group=group_id) + return False + + def prompt_confirmation(self, config: ServerConfig, group_id: str = None) -> bool: + """Show summary and ask for confirmation.""" + emit_info("Configuration Summary:", message_group=group_id) + emit_info(f" Name: {config.name}", message_group=group_id) + emit_info(f" Type: {config.type}", message_group=group_id) + + if config.type in ["sse", "http"]: + emit_info(f" URL: {config.config.get('url')}", message_group=group_id) + elif config.type == "stdio": + emit_info( + f" Command: {config.config.get('command')}", message_group=group_id + ) + args = config.config.get("args", []) + if args: + emit_info(f" Arguments: {' '.join(args)}", message_group=group_id) + + emit_info( + f" Timeout: {config.config.get('timeout', 30)}s", message_group=group_id + ) + + # Test connection if requested + if confirm_ask("Test connection?", default=True): + if not self.test_connection(config, group_id): + if not confirm_ask("Continue anyway?", default=False): + return False + + return confirm_ask("Save this configuration?", default=True) + + +def run_add_wizard(group_id: str = None) -> bool: + """ + Run the MCP add wizard and register the server. 
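+
+    On success the server is registered with the MCP manager and the raw
+    configuration is persisted to MCP_SERVERS_FILE, so a minimal programmatic
+    call is simply:
+
+        added = run_add_wizard()  # True if a server was registered and saved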
+ + Args: + group_id: Optional message group ID for grouping related messages + + Returns: + True if server was added, False otherwise + """ + if group_id is None: + import uuid + + group_id = str(uuid.uuid4()) + + wizard = MCPConfigWizard() + config = wizard.run_wizard(group_id) + + if config: + try: + manager = get_mcp_manager() + server_id = manager.register_server(config) + + emit_success( + f"\n✅ Server '{config.name}' added successfully!", + message_group=group_id, + ) + emit_info(f"Server ID: {server_id}", message_group=group_id) + emit_info("Use '/mcp list' to see all servers", message_group=group_id) + emit_info( + f"Use '/mcp start {config.name}' to start the server", + message_group=group_id, + ) + + # Also save to mcp_servers.json for persistence + import json + import os + + from code_puppy.config import MCP_SERVERS_FILE + + # Load existing configs + if os.path.exists(MCP_SERVERS_FILE): + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + else: + servers = {} + data = {"mcp_servers": servers} + + # Add new server + servers[config.name] = config.config + + # Save back + os.makedirs(os.path.dirname(MCP_SERVERS_FILE), exist_ok=True) + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + + emit_info( + f"[dim]Configuration saved to {MCP_SERVERS_FILE}[/dim]", + message_group=group_id, + ) + return True + + except Exception as e: + emit_error(f"Failed to add server: {e}", message_group=group_id) + return False + else: + emit_warning("Configuration cancelled", message_group=group_id) + return False diff --git a/code_puppy/mcp_/dashboard.py b/code_puppy/mcp_/dashboard.py new file mode 100644 index 00000000..5e25cc8c --- /dev/null +++ b/code_puppy/mcp_/dashboard.py @@ -0,0 +1,299 @@ +""" +MCP Dashboard Implementation + +Provides visual status dashboard for MCP servers using Rich tables. 
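+
+Typical usage (illustrative):
+
+    dashboard = MCPDashboard()
+    dashboard.print_dashboard()               # render the table to the console
+    text = dashboard.get_dashboard_string()   # or capture it as plain text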
+""" + +from datetime import datetime +from typing import Dict, List, Optional + +from rich import box +from rich.console import Console +from rich.table import Table + +from .manager import get_mcp_manager +from .status_tracker import ServerState + + +class MCPDashboard: + """Visual dashboard for MCP server status monitoring""" + + def __init__(self): + """Initialize the MCP Dashboard""" + self.console = Console() + + def render_dashboard(self) -> Table: + """ + Render the main MCP server status dashboard + + Returns: + Table: Rich table with server status information + """ + # Create the main table + table = Table( + title="MCP Server Status Dashboard", + box=box.ROUNDED, + show_header=True, + header_style="bold blue", + title_style="bold cyan", + ) + + # Define columns + table.add_column("Name", style="white", no_wrap=True, min_width=10) + table.add_column("Type", style="white", no_wrap=True, width=8) + table.add_column("State", style="white", no_wrap=True, width=8) + table.add_column("Health", style="white", no_wrap=True, width=8) + table.add_column("Uptime", style="white", no_wrap=True, width=10) + table.add_column("Latency", style="white", no_wrap=True, width=10) + + # Get manager and server info + try: + manager = get_mcp_manager() + servers = manager.list_servers() + + if not servers: + # Empty state + table.add_row( + "[dim]No servers configured[/dim]", "-", "-", "-", "-", "-" + ) + else: + # Add row for each server + for server in servers: + row_data = self.render_server_row(server) + table.add_row(*row_data) + + except Exception as e: + # Error state + table.add_row( + "[red]Error loading servers[/red]", + "-", + "-", + "-", + "-", + f"[red]{str(e)}[/red]", + ) + + return table + + def render_server_row(self, server) -> List[str]: + """ + Render a single server row for the dashboard + + Args: + server: ServerInfo object with server details + + Returns: + List[str]: Formatted row data for the table + """ + # Server name + name = server.name or server.id[:8] + + # Server type + server_type = server.type.upper() if server.type else "UNK" + + # State indicator + state_indicator = self.render_state_indicator(server.state) + + # Health indicator + health_indicator = self.render_health_indicator(server.health) + + # Uptime + uptime_str = self.format_uptime(server.start_time) if server.start_time else "-" + + # Latency + latency_str = ( + self.format_latency(server.latency_ms) + if server.latency_ms is not None + else "-" + ) + + return [ + name, + server_type, + state_indicator, + health_indicator, + uptime_str, + latency_str, + ] + + def render_health_indicator(self, health: Optional[Dict]) -> str: + """ + Render health status indicator + + Args: + health: Health status dictionary or None + + Returns: + str: Formatted health indicator with color + """ + if not health: + return "[dim]?[/dim]" + + is_healthy = health.get("is_healthy", False) + error = health.get("error") + + if is_healthy: + return "[green]✓[/green]" + elif error: + return "[red]✗[/red]" + else: + return "[yellow]?[/yellow]" + + def render_state_indicator(self, state: ServerState) -> str: + """ + Render server state indicator + + Args: + state: Current server state + + Returns: + str: Formatted state indicator with color and symbol + """ + indicators = { + ServerState.RUNNING: "[green]✓ Run[/green]", + ServerState.STOPPED: "[red]✗ Stop[/red]", + ServerState.ERROR: "[red]⚠ Err[/red]", + ServerState.STARTING: "[yellow]⏳ Start[/yellow]", + ServerState.STOPPING: "[yellow]⏳ Stop[/yellow]", + ServerState.QUARANTINED: 
"[yellow]⏸ Quar[/yellow]", + } + + return indicators.get(state, "[dim]? Unk[/dim]") + + def render_metrics_summary(self, metrics: Dict) -> str: + """ + Render a summary of server metrics + + Args: + metrics: Dictionary of server metrics + + Returns: + str: Formatted metrics summary + """ + if not metrics: + return "No metrics" + + parts = [] + + # Request count + if "request_count" in metrics: + parts.append(f"Req: {metrics['request_count']}") + + # Error rate + if "error_rate" in metrics: + error_rate = metrics["error_rate"] + if error_rate > 0.1: # 10% + parts.append(f"[red]Err: {error_rate:.1%}[/red]") + elif error_rate > 0.05: # 5% + parts.append(f"[yellow]Err: {error_rate:.1%}[/yellow]") + else: + parts.append(f"[green]Err: {error_rate:.1%}[/green]") + + # Response time + if "avg_response_time" in metrics: + avg_time = metrics["avg_response_time"] + parts.append(f"Avg: {avg_time:.0f}ms") + + return " | ".join(parts) if parts else "No data" + + def format_uptime(self, start_time: datetime) -> str: + """ + Format uptime duration in human readable format + + Args: + start_time: Server start timestamp + + Returns: + str: Formatted uptime string (e.g., "2h 15m") + """ + if not start_time: + return "-" + + try: + uptime = datetime.now() - start_time + + # Handle negative uptime (clock skew, etc.) + if uptime.total_seconds() < 0: + return "0s" + + # Format based on duration + total_seconds = int(uptime.total_seconds()) + + if total_seconds < 60: # Less than 1 minute + return f"{total_seconds}s" + elif total_seconds < 3600: # Less than 1 hour + minutes = total_seconds // 60 + seconds = total_seconds % 60 + if seconds > 0: + return f"{minutes}m {seconds}s" + else: + return f"{minutes}m" + elif total_seconds < 86400: # Less than 1 day + hours = total_seconds // 3600 + minutes = (total_seconds % 3600) // 60 + if minutes > 0: + return f"{hours}h {minutes}m" + else: + return f"{hours}h" + else: # 1 day or more + days = total_seconds // 86400 + hours = (total_seconds % 86400) // 3600 + if hours > 0: + return f"{days}d {hours}h" + else: + return f"{days}d" + + except Exception: + return "?" 
+ + def format_latency(self, latency_ms: float) -> str: + """ + Format latency in human readable format + + Args: + latency_ms: Latency in milliseconds + + Returns: + str: Formatted latency string with color coding + """ + if latency_ms is None: + return "-" + + try: + if latency_ms < 0: + return "invalid" + elif latency_ms < 50: # Fast + return f"[green]{latency_ms:.0f}ms[/green]" + elif latency_ms < 200: # Acceptable + return f"[yellow]{latency_ms:.0f}ms[/yellow]" + elif latency_ms < 1000: # Slow + return f"[red]{latency_ms:.0f}ms[/red]" + elif latency_ms >= 30000: # Timeout (30s+) + return "[red]timeout[/red]" + else: # Very slow + seconds = latency_ms / 1000 + return f"[red]{seconds:.1f}s[/red]" + + except (ValueError, TypeError): + return "error" + + def print_dashboard(self) -> None: + """Print the dashboard to console""" + table = self.render_dashboard() + self.console.print(table) + self.console.print() # Add spacing + + def get_dashboard_string(self) -> str: + """ + Get dashboard as a string for programmatic use + + Returns: + str: Dashboard rendered as plain text + """ + # Create a console that captures output + console = Console(file=None, width=80) + + with console.capture() as capture: + console.print(self.render_dashboard()) + + return capture.get() diff --git a/code_puppy/mcp_/error_isolation.py b/code_puppy/mcp_/error_isolation.py new file mode 100644 index 00000000..241c8621 --- /dev/null +++ b/code_puppy/mcp_/error_isolation.py @@ -0,0 +1,407 @@ +""" +MCP Error Isolation System + +This module provides error isolation for MCP server calls to prevent +server errors from crashing the application. It implements quarantine +logic with exponential backoff for failed servers. +""" + +import asyncio +import logging +from dataclasses import dataclass, field +from datetime import datetime, timedelta +from enum import Enum +from typing import Any, Callable, Dict, Optional + +logger = logging.getLogger(__name__) + + +@dataclass +class ErrorStats: + """Statistics for MCP server errors and quarantine status.""" + + total_errors: int = 0 + consecutive_errors: int = 0 + last_error: Optional[datetime] = None + error_types: Dict[str, int] = field(default_factory=dict) + quarantine_count: int = 0 + quarantine_until: Optional[datetime] = None + + +class ErrorCategory(Enum): + """Categories of errors that can be isolated.""" + + NETWORK = "network" + PROTOCOL = "protocol" + SERVER = "server" + RATE_LIMIT = "rate_limit" + AUTHENTICATION = "authentication" + UNKNOWN = "unknown" + + +class MCPErrorIsolator: + """ + Isolates MCP server errors to prevent application crashes. + + Features: + - Quarantine servers after consecutive failures + - Exponential backoff for quarantine duration + - Error categorization and tracking + - Automatic recovery after successful calls + """ + + def __init__(self, quarantine_threshold: int = 5, max_quarantine_minutes: int = 30): + """ + Initialize the error isolator. 
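+
+        A minimal sketch of intended use (the server id and the wrapped call
+        are illustrative):
+
+            isolator = get_error_isolator()
+            result = await isolator.isolated_call("my-server", server.list_tools)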
+ + Args: + quarantine_threshold: Number of consecutive errors to trigger quarantine + max_quarantine_minutes: Maximum quarantine duration in minutes + """ + self.quarantine_threshold = quarantine_threshold + self.max_quarantine_duration = timedelta(minutes=max_quarantine_minutes) + self.server_stats: Dict[str, ErrorStats] = {} + self._lock = asyncio.Lock() + + logger.info( + f"MCPErrorIsolator initialized with threshold={quarantine_threshold}, " + f"max_quarantine={max_quarantine_minutes}min" + ) + + async def isolated_call( + self, server_id: str, func: Callable, *args, **kwargs + ) -> Any: + """ + Execute a function call with error isolation. + + Args: + server_id: ID of the MCP server making the call + func: Function to execute + *args: Arguments for the function + **kwargs: Keyword arguments for the function + + Returns: + Result of the function call + + Raises: + Exception: If the server is quarantined or the call fails + """ + async with self._lock: + # Check if server is quarantined + if self.is_quarantined(server_id): + quarantine_until = self.server_stats[server_id].quarantine_until + raise QuarantinedServerError( + f"Server {server_id} is quarantined until {quarantine_until}" + ) + + try: + # Execute the function + if asyncio.iscoroutinefunction(func): + result = await func(*args, **kwargs) + else: + result = func(*args, **kwargs) + + # Record success + async with self._lock: + await self._record_success(server_id) + + return result + + except Exception as error: + # Record and categorize the error + async with self._lock: + await self._record_error(server_id, error) + + # Re-raise the error + raise + + async def quarantine_server(self, server_id: str, duration: int) -> None: + """ + Manually quarantine a server for a specific duration. + + Args: + server_id: ID of the server to quarantine + duration: Quarantine duration in seconds + """ + async with self._lock: + stats = self._get_or_create_stats(server_id) + stats.quarantine_until = datetime.now() + timedelta(seconds=duration) + stats.quarantine_count += 1 + + logger.warning( + f"Server {server_id} quarantined for {duration}s " + f"(count: {stats.quarantine_count})" + ) + + def is_quarantined(self, server_id: str) -> bool: + """ + Check if a server is currently quarantined. + + Args: + server_id: ID of the server to check + + Returns: + True if the server is quarantined, False otherwise + """ + if server_id not in self.server_stats: + return False + + stats = self.server_stats[server_id] + if stats.quarantine_until is None: + return False + + # Check if quarantine has expired + if datetime.now() >= stats.quarantine_until: + stats.quarantine_until = None + return False + + return True + + async def release_quarantine(self, server_id: str) -> None: + """ + Manually release a server from quarantine. + + Args: + server_id: ID of the server to release + """ + async with self._lock: + if server_id in self.server_stats: + self.server_stats[server_id].quarantine_until = None + logger.info(f"Server {server_id} released from quarantine") + + def get_error_stats(self, server_id: str) -> ErrorStats: + """ + Get error statistics for a server. + + Args: + server_id: ID of the server + + Returns: + ErrorStats object with current statistics + """ + if server_id not in self.server_stats: + return ErrorStats() + + return self.server_stats[server_id] + + def should_quarantine(self, server_id: str) -> bool: + """ + Check if a server should be quarantined based on error count. 
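+
+        For example, with the default quarantine_threshold of 5, the fifth
+        consecutive error for a server makes this return True, after which
+        _record_error applies an exponentially growing quarantine window.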
+ + Args: + server_id: ID of the server to check + + Returns: + True if the server should be quarantined + """ + if server_id not in self.server_stats: + return False + + stats = self.server_stats[server_id] + return stats.consecutive_errors >= self.quarantine_threshold + + def _get_or_create_stats(self, server_id: str) -> ErrorStats: + """Get or create error stats for a server.""" + if server_id not in self.server_stats: + self.server_stats[server_id] = ErrorStats() + return self.server_stats[server_id] + + async def _record_success(self, server_id: str) -> None: + """Record a successful call and reset consecutive error count.""" + stats = self._get_or_create_stats(server_id) + stats.consecutive_errors = 0 + + logger.debug( + f"Success recorded for server {server_id}, consecutive errors reset" + ) + + async def _record_error(self, server_id: str, error: Exception) -> None: + """Record an error and potentially quarantine the server.""" + stats = self._get_or_create_stats(server_id) + + # Update error statistics + stats.total_errors += 1 + stats.consecutive_errors += 1 + stats.last_error = datetime.now() + + # Categorize the error + error_category = self._categorize_error(error) + error_type = error_category.value + stats.error_types[error_type] = stats.error_types.get(error_type, 0) + 1 + + logger.warning( + f"Error recorded for server {server_id}: {error_type} - {str(error)} " + f"(consecutive: {stats.consecutive_errors})" + ) + + # Check if quarantine is needed + if self.should_quarantine(server_id): + quarantine_duration = self._calculate_quarantine_duration( + stats.quarantine_count + ) + stats.quarantine_until = datetime.now() + timedelta( + seconds=quarantine_duration + ) + stats.quarantine_count += 1 + + logger.error( + f"Server {server_id} quarantined for {quarantine_duration}s " + f"after {stats.consecutive_errors} consecutive errors " + f"(quarantine count: {stats.quarantine_count})" + ) + + def _categorize_error(self, error: Exception) -> ErrorCategory: + """ + Categorize an error based on its type and properties. 
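+
+        For example, a ConnectionError or a "timeout" message maps to NETWORK,
+        an HTTP 429 / "rate limit" message to RATE_LIMIT, a 401/403 to
+        AUTHENTICATION, and anything unrecognised falls back to UNKNOWN.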
+ + Args: + error: The exception to categorize + + Returns: + ErrorCategory enum value + """ + error_type = type(error).__name__.lower() + error_message = str(error).lower() + + # Network errors + if any( + keyword in error_type + for keyword in ["connection", "timeout", "network", "socket", "dns", "ssl"] + ): + return ErrorCategory.NETWORK + + if any( + keyword in error_message + for keyword in [ + "connection", + "timeout", + "network", + "unreachable", + "refused", + ] + ): + return ErrorCategory.NETWORK + + # Protocol errors + if any( + keyword in error_type + for keyword in [ + "json", + "decode", + "parse", + "schema", + "validation", + "protocol", + ] + ): + return ErrorCategory.PROTOCOL + + if any( + keyword in error_message + for keyword in ["json", "decode", "parse", "invalid", "malformed", "schema"] + ): + return ErrorCategory.PROTOCOL + + # Authentication errors + if any( + keyword in error_type + for keyword in ["auth", "permission", "unauthorized", "forbidden"] + ): + return ErrorCategory.AUTHENTICATION + + if any( + keyword in error_message + for keyword in [ + "401", + "403", + "unauthorized", + "forbidden", + "authentication", + "permission", + ] + ): + return ErrorCategory.AUTHENTICATION + + # Rate limit errors + if any(keyword in error_type for keyword in ["rate", "limit", "throttle"]): + return ErrorCategory.RATE_LIMIT + + if any( + keyword in error_message + for keyword in ["429", "rate limit", "too many requests", "throttle"] + ): + return ErrorCategory.RATE_LIMIT + + # Server errors (5xx responses) + if any( + keyword in error_message + for keyword in [ + "500", + "501", + "502", + "503", + "504", + "505", + "internal server error", + "bad gateway", + "service unavailable", + "gateway timeout", + ] + ): + return ErrorCategory.SERVER + + if any(keyword in error_type for keyword in ["server", "internal"]): + return ErrorCategory.SERVER + + # Default to unknown + return ErrorCategory.UNKNOWN + + def _calculate_quarantine_duration(self, quarantine_count: int) -> int: + """ + Calculate quarantine duration using exponential backoff. + + Args: + quarantine_count: Number of times this server has been quarantined + + Returns: + Quarantine duration in seconds + """ + # Base duration: 30 seconds + base_duration = 30 + + # Exponential backoff: 30s, 60s, 120s, 240s, etc. + duration = base_duration * (2**quarantine_count) + + # Cap at maximum duration (convert to seconds) + max_seconds = int(self.max_quarantine_duration.total_seconds()) + duration = min(duration, max_seconds) + + logger.debug( + f"Calculated quarantine duration: {duration}s " + f"(count: {quarantine_count}, max: {max_seconds}s)" + ) + + return duration + + +class QuarantinedServerError(Exception): + """Raised when attempting to call a quarantined server.""" + + pass + + +# Global isolator instance +_isolator_instance: Optional[MCPErrorIsolator] = None + + +def get_error_isolator() -> MCPErrorIsolator: + """ + Get the global MCPErrorIsolator instance. + + Returns: + MCPErrorIsolator instance + """ + global _isolator_instance + if _isolator_instance is None: + _isolator_instance = MCPErrorIsolator() + return _isolator_instance diff --git a/code_puppy/mcp_/examples/retry_example.py b/code_puppy/mcp_/examples/retry_example.py new file mode 100644 index 00000000..1761a384 --- /dev/null +++ b/code_puppy/mcp_/examples/retry_example.py @@ -0,0 +1,224 @@ +#!/usr/bin/env python3 +""" +Example usage of RetryManager with MCP server operations. 
+ +This demonstrates how the RetryManager can be integrated with MCP server calls +to handle transient failures gracefully with intelligent backoff strategies. +""" + +import asyncio +import logging +import random +import sys +from pathlib import Path +from typing import Any + +# Add project root to path +project_root = Path(__file__).parents[3] +sys.path.insert(0, str(project_root)) + +from code_puppy.mcp_.retry_manager import ( # noqa: E402 + get_retry_manager, + retry_mcp_call, +) + +logger = logging.getLogger(__name__) + + +class MockMCPServer: + """Mock MCP server for demonstration purposes.""" + + def __init__(self, failure_rate: float = 0.3): + """ + Initialize the mock server. + + Args: + failure_rate: Probability of failure (0.0 to 1.0) + """ + self.failure_rate = failure_rate + self.call_count = 0 + + async def list_tools(self) -> list: + """Simulate listing available tools.""" + self.call_count += 1 + + # Simulate random failures + if random.random() < self.failure_rate: + raise ConnectionError( + f"Simulated connection failure (call #{self.call_count})" + ) + + return [ + {"name": "read_file", "description": "Read a file"}, + {"name": "write_file", "description": "Write a file"}, + {"name": "list_directory", "description": "List directory contents"}, + ] + + async def call_tool(self, name: str, args: dict) -> Any: + """Simulate calling a tool.""" + self.call_count += 1 + + # Simulate random failures + if random.random() < self.failure_rate: + if random.random() < 0.5: + raise ConnectionError(f"Connection failed for {name}") + else: + # Simulate a 500 error + from unittest.mock import Mock + + import httpx + + response = Mock() + response.status_code = 500 + raise httpx.HTTPStatusError( + "Server Error", request=Mock(), response=response + ) + + return f"Tool '{name}' executed with args: {args}" + + +async def demonstrate_basic_retry(): + """Demonstrate basic retry functionality.""" + print("=== Basic Retry Demonstration ===") + + retry_manager = get_retry_manager() + server = MockMCPServer(failure_rate=0.5) # 50% failure rate + + async def list_tools_call(): + return await server.list_tools() + + try: + result = await retry_manager.retry_with_backoff( + func=list_tools_call, + max_attempts=3, + strategy="exponential", + server_id="demo-server", + ) + print(f"✅ Success: Retrieved {len(result)} tools") + print(f"Server call count: {server.call_count}") + except Exception as e: + print(f"❌ Failed after retries: {e}") + + # Check retry stats + stats = await retry_manager.get_retry_stats("demo-server") + print( + f"Retry stats: total={stats.total_retries}, successful={stats.successful_retries}" + ) + print() + + +async def demonstrate_different_strategies(): + """Demonstrate different backoff strategies.""" + print("=== Backoff Strategies Demonstration ===") + + strategies = ["fixed", "linear", "exponential", "exponential_jitter"] + + for strategy in strategies: + print(f"\n{strategy.upper()} strategy:") + server = MockMCPServer(failure_rate=0.7) # High failure rate + + try: + start_time = asyncio.get_event_loop().time() + + result = await retry_mcp_call( + func=lambda: server.call_tool("read_file", {"path": "/example.txt"}), + server_id=f"server-{strategy}", + max_attempts=3, + strategy=strategy, + ) + + end_time = asyncio.get_event_loop().time() + print(f" ✅ Success: {result}") + print(f" Time taken: {end_time - start_time:.2f}s") + print(f" Call count: {server.call_count}") + except Exception as e: + end_time = asyncio.get_event_loop().time() + print(f" ❌ Failed: {e}") + print(f" 
Time taken: {end_time - start_time:.2f}s") + print(f" Call count: {server.call_count}") + + +async def demonstrate_concurrent_retries(): + """Demonstrate concurrent retry operations.""" + print("\n=== Concurrent Retries Demonstration ===") + + retry_manager = get_retry_manager() + + # Create multiple servers with different failure rates + servers = [ + ("reliable-server", MockMCPServer(failure_rate=0.1)), + ("unreliable-server", MockMCPServer(failure_rate=0.8)), + ("moderate-server", MockMCPServer(failure_rate=0.4)), + ] + + async def make_call(server_name: str, server: MockMCPServer): + """Make a call with retry handling.""" + try: + await retry_manager.retry_with_backoff( + func=lambda: server.list_tools(), + max_attempts=3, + strategy="exponential_jitter", + server_id=server_name, + ) + return f"{server_name}: Success (calls: {server.call_count})" + except Exception as e: + return f"{server_name}: Failed - {e} (calls: {server.call_count})" + + # Run concurrent calls + tasks = [make_call(name, server) for name, server in servers] + results = await asyncio.gather(*tasks) + + print("Concurrent results:") + for result in results: + print(f" {result}") + + # Show overall stats + print("\nOverall retry statistics:") + all_stats = await retry_manager.get_all_stats() + for server_id, stats in all_stats.items(): + success_rate = (stats.successful_retries / max(stats.total_retries, 1)) * 100 + print( + f" {server_id}: {stats.total_retries} retries, {success_rate:.1f}% success rate" + ) + + +async def demonstrate_error_classification(): + """Demonstrate error classification for retry decisions.""" + print("\n=== Error Classification Demonstration ===") + + retry_manager = get_retry_manager() + + # Test different error types + test_errors = [ + ConnectionError("Network connection failed"), + asyncio.TimeoutError("Request timeout"), + ValueError("JSON decode error: invalid format"), + ValueError("Schema validation failed"), + Exception("Authentication failed"), + Exception("Permission denied"), + ] + + print("Error retry decisions:") + for error in test_errors: + should_retry = retry_manager.should_retry(error) + status = "✅ RETRY" if should_retry else "❌ NO RETRY" + print(f" {type(error).__name__}: {error} → {status}") + + +async def main(): + """Run all demonstrations.""" + print("RetryManager Example Demonstrations") + print("=" * 50) + + await demonstrate_basic_retry() + await demonstrate_different_strategies() + await demonstrate_concurrent_retries() + await demonstrate_error_classification() + + print("\n🎉 All demonstrations completed!") + + +if __name__ == "__main__": + # Set a seed for reproducible results in the demo + random.seed(42) + asyncio.run(main()) diff --git a/code_puppy/mcp_/health_monitor.py b/code_puppy/mcp_/health_monitor.py new file mode 100644 index 00000000..99af470c --- /dev/null +++ b/code_puppy/mcp_/health_monitor.py @@ -0,0 +1,560 @@ +""" +Health monitoring system for MCP servers. + +This module provides continuous health monitoring for MCP servers with +automatic recovery actions when consecutive failures are detected. 
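+
+A minimal monitoring sketch (the server id and managed server are illustrative):
+
+    monitor = HealthMonitor(check_interval=30)
+    await monitor.start_monitoring("server-1", managed_server)
+    ...
+    await monitor.shutdown()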
+""" + +import asyncio +import logging +import time +from collections import defaultdict, deque +from dataclasses import dataclass +from datetime import datetime +from typing import Callable, Dict, List, Optional + +import httpx + +from .managed_server import ManagedMCPServer + +logger = logging.getLogger(__name__) + + +@dataclass +class HealthStatus: + """Status of a health check for an MCP server.""" + + timestamp: datetime + is_healthy: bool + latency_ms: Optional[float] + error: Optional[str] + check_type: str # "ping", "list_tools", "get_request", etc. + + +@dataclass +class HealthCheckResult: + """Result of performing a health check.""" + + success: bool + latency_ms: float + error: Optional[str] + + +class HealthMonitor: + """ + Continuous health monitoring system for MCP servers. + + Features: + - Background monitoring tasks using asyncio + - Server type-specific health checks + - Health history tracking with configurable limit + - Custom health check registration + - Automatic recovery triggering on consecutive failures + - Configurable check intervals + + Example usage: + monitor = HealthMonitor(check_interval=30) + await monitor.start_monitoring("server-1", managed_server) + + # Check current health + is_healthy = monitor.is_healthy("server-1") + + # Get health history + history = monitor.get_health_history("server-1", limit=50) + """ + + def __init__(self, check_interval: int = 30): + """ + Initialize the health monitor. + + Args: + check_interval: Interval between health checks in seconds + """ + self.check_interval = check_interval + self.monitoring_tasks: Dict[str, asyncio.Task] = {} + self.health_history: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000)) + self.custom_health_checks: Dict[str, Callable] = {} + self.consecutive_failures: Dict[str, int] = defaultdict(int) + self.last_check_time: Dict[str, datetime] = {} + + # Register default health checks for each server type + self._register_default_health_checks() + + logger.info(f"Health monitor initialized with {check_interval}s check interval") + + def _register_default_health_checks(self) -> None: + """Register default health check methods for each server type.""" + self.register_health_check("sse", self._check_sse_health) + self.register_health_check("http", self._check_http_health) + self.register_health_check("stdio", self._check_stdio_health) + + async def start_monitoring(self, server_id: str, server: ManagedMCPServer) -> None: + """ + Start continuous health monitoring for a server. + + Args: + server_id: Unique identifier for the server + server: The managed MCP server instance to monitor + """ + if server_id in self.monitoring_tasks: + logger.warning(f"Server {server_id} is already being monitored") + return + + logger.info(f"Starting health monitoring for server {server_id}") + + # Create background monitoring task + task = asyncio.create_task( + self._monitoring_loop(server_id, server), name=f"health_monitor_{server_id}" + ) + self.monitoring_tasks[server_id] = task + + # Perform initial health check + try: + health_status = await self.check_health(server) + self._record_health_status(server_id, health_status) + except Exception as e: + logger.error(f"Initial health check failed for {server_id}: {e}") + error_status = HealthStatus( + timestamp=datetime.now(), + is_healthy=False, + latency_ms=None, + error=str(e), + check_type="initial", + ) + self._record_health_status(server_id, error_status) + + async def stop_monitoring(self, server_id: str) -> None: + """ + Stop health monitoring for a server. 
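+
+        The background task is cancelled and awaited, and the per-server
+        failure counters are cleared; calling this for a server that is not
+        being monitored only logs a warning.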
+ + Args: + server_id: Unique identifier for the server + """ + task = self.monitoring_tasks.pop(server_id, None) + if task: + logger.info(f"Stopping health monitoring for server {server_id}") + task.cancel() + try: + await task + except asyncio.CancelledError: + pass + + # Clean up tracking data + self.consecutive_failures.pop(server_id, None) + self.last_check_time.pop(server_id, None) + else: + logger.warning(f"No monitoring task found for server {server_id}") + + async def check_health(self, server: ManagedMCPServer) -> HealthStatus: + """ + Perform a health check for a server. + + Args: + server: The managed MCP server to check + + Returns: + HealthStatus object with check results + """ + server_type = server.config.type.lower() + check_func = self.custom_health_checks.get(server_type) + + if not check_func: + logger.warning( + f"No health check function registered for server type: {server_type}" + ) + return HealthStatus( + timestamp=datetime.now(), + is_healthy=False, + latency_ms=None, + error=f"No health check registered for type '{server_type}'", + check_type="unknown", + ) + + try: + result = await self.perform_health_check(server) + return HealthStatus( + timestamp=datetime.now(), + is_healthy=result.success, + latency_ms=result.latency_ms, + error=result.error, + check_type=server_type, + ) + except Exception as e: + logger.error(f"Health check failed for server {server.config.id}: {e}") + return HealthStatus( + timestamp=datetime.now(), + is_healthy=False, + latency_ms=None, + error=str(e), + check_type=server_type, + ) + + async def perform_health_check(self, server: ManagedMCPServer) -> HealthCheckResult: + """ + Perform the actual health check based on server type. + + Args: + server: The managed MCP server to check + + Returns: + HealthCheckResult with timing and success information + """ + server_type = server.config.type.lower() + check_func = self.custom_health_checks.get(server_type) + + if not check_func: + return HealthCheckResult( + success=False, + latency_ms=0.0, + error=f"No health check function for type '{server_type}'", + ) + + start_time = time.time() + try: + result = await check_func(server) + latency_ms = (time.time() - start_time) * 1000 + + if isinstance(result, bool): + return HealthCheckResult( + success=result, + latency_ms=latency_ms, + error=None if result else "Health check returned False", + ) + elif isinstance(result, HealthCheckResult): + # Update latency if not already set + if result.latency_ms == 0.0: + result.latency_ms = latency_ms + return result + else: + return HealthCheckResult( + success=False, + latency_ms=latency_ms, + error=f"Invalid health check result type: {type(result)}", + ) + + except Exception as e: + latency_ms = (time.time() - start_time) * 1000 + return HealthCheckResult(success=False, latency_ms=latency_ms, error=str(e)) + + def register_health_check(self, server_type: str, check_func: Callable) -> None: + """ + Register a custom health check function for a server type. + + Args: + server_type: The server type ("sse", "http", "stdio") + check_func: Async function that takes a ManagedMCPServer and returns + bool or HealthCheckResult + """ + self.custom_health_checks[server_type.lower()] = check_func + logger.info(f"Registered health check for server type: {server_type}") + + def get_health_history( + self, server_id: str, limit: int = 100 + ) -> List[HealthStatus]: + """ + Get health check history for a server. 
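+
+        For example, to inspect the most recent checks (the server id is
+        illustrative):
+
+            for status in monitor.get_health_history("server-1", limit=5):
+                print(status.timestamp, status.is_healthy, status.latency_ms)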
+ + Args: + server_id: Unique identifier for the server + limit: Maximum number of history entries to return + + Returns: + List of HealthStatus objects, most recent first + """ + history = self.health_history.get(server_id, deque()) + # Convert deque to list and limit results + result = list(history)[-limit:] if limit > 0 else list(history) + # Reverse to get most recent first + result.reverse() + return result + + def is_healthy(self, server_id: str) -> bool: + """ + Check if a server is currently healthy based on latest status. + + Args: + server_id: Unique identifier for the server + + Returns: + True if server is healthy, False otherwise + """ + history = self.health_history.get(server_id) + if not history: + return False + + # Get most recent health status + latest_status = history[-1] + return latest_status.is_healthy + + async def _monitoring_loop(self, server_id: str, server: ManagedMCPServer) -> None: + """ + Main monitoring loop that runs in the background. + + Args: + server_id: Unique identifier for the server + server: The managed MCP server to monitor + """ + logger.info(f"Starting monitoring loop for server {server_id}") + + while True: + try: + # Wait for check interval + await asyncio.sleep(self.check_interval) + + # Skip if server is not enabled + if not server.is_enabled(): + continue + + # Perform health check + health_status = await self.check_health(server) + self._record_health_status(server_id, health_status) + + # Handle consecutive failures + if not health_status.is_healthy: + self.consecutive_failures[server_id] += 1 + logger.warning( + f"Health check failed for {server_id}: {health_status.error} " + f"(consecutive failures: {self.consecutive_failures[server_id]})" + ) + + # Trigger recovery on consecutive failures + await self._handle_consecutive_failures(server_id, server) + else: + # Reset consecutive failure count on success + if self.consecutive_failures[server_id] > 0: + logger.info( + f"Server {server_id} recovered after health check success" + ) + self.consecutive_failures[server_id] = 0 + + self.last_check_time[server_id] = datetime.now() + + except asyncio.CancelledError: + logger.info(f"Monitoring loop cancelled for server {server_id}") + break + except Exception as e: + logger.error(f"Error in monitoring loop for {server_id}: {e}") + # Continue monitoring despite errors + await asyncio.sleep(5) # Brief delay before retrying + + def _record_health_status(self, server_id: str, status: HealthStatus) -> None: + """ + Record a health status in the history. + + Args: + server_id: Unique identifier for the server + status: The health status to record + """ + self.health_history[server_id].append(status) + + # Log health status changes + if status.is_healthy: + logger.debug( + f"Server {server_id} health check passed ({status.latency_ms:.1f}ms)" + ) + else: + logger.warning(f"Server {server_id} health check failed: {status.error}") + + async def _handle_consecutive_failures( + self, server_id: str, server: ManagedMCPServer + ) -> None: + """ + Handle consecutive health check failures. 
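+
+        Recovery is attempted from the third consecutive failure onward; from
+        the fifth failure the server is also quarantined, with the duration
+        doubling per additional failure (30s, 60s, ...) up to a 30 minute cap.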
+ + Args: + server_id: Unique identifier for the server + server: The managed MCP server + """ + failure_count = self.consecutive_failures[server_id] + + # Trigger recovery actions based on failure count + if failure_count >= 3: + logger.error( + f"Server {server_id} has {failure_count} consecutive failures, triggering recovery" + ) + + try: + # Attempt to recover the server + await self._trigger_recovery(server_id, server, failure_count) + except Exception as e: + logger.error(f"Recovery failed for server {server_id}: {e}") + + # Quarantine server after many consecutive failures + if failure_count >= 5: + logger.critical( + f"Quarantining server {server_id} after {failure_count} consecutive failures" + ) + try: + # Calculate quarantine duration with exponential backoff + quarantine_duration = min( + 30 * (2 ** (failure_count - 5)), 1800 + ) # Max 30 minutes + server.quarantine(quarantine_duration) + except Exception as e: + logger.error(f"Failed to quarantine server {server_id}: {e}") + + async def _trigger_recovery( + self, server_id: str, server: ManagedMCPServer, failure_count: int + ) -> None: + """ + Trigger recovery actions for a failing server. + + Args: + server_id: Unique identifier for the server + server: The managed MCP server + failure_count: Number of consecutive failures + """ + logger.info( + f"Triggering recovery for server {server_id} (failure count: {failure_count})" + ) + + try: + # For now, just disable and re-enable the server + # In the future, this could include more sophisticated recovery actions + server.disable() + await asyncio.sleep(1) # Brief delay + server.enable() + + logger.info(f"Recovery attempt completed for server {server_id}") + + except Exception as e: + logger.error(f"Recovery action failed for server {server_id}: {e}") + raise + + async def _check_sse_health(self, server: ManagedMCPServer) -> HealthCheckResult: + """ + Health check for SSE servers using GET request. + + Args: + server: The managed MCP server to check + + Returns: + HealthCheckResult with check results + """ + try: + config = server.config.config + url = config.get("url") + if not url: + return HealthCheckResult( + success=False, + latency_ms=0.0, + error="No URL configured for SSE server", + ) + + # Add health endpoint if available, otherwise use base URL + health_url = ( + f"{url.rstrip('/')}/health" if not url.endswith("/health") else url + ) + + async with httpx.AsyncClient(timeout=10.0) as client: + response = await client.get(health_url) + + if response.status_code == 404: + # Try base URL if health endpoint doesn't exist + response = await client.get(url) + + success = 200 <= response.status_code < 400 + error = ( + None + if success + else f"HTTP {response.status_code}: {response.reason_phrase}" + ) + + return HealthCheckResult( + success=success, + latency_ms=0.0, # Will be filled by perform_health_check + error=error, + ) + + except Exception as e: + return HealthCheckResult(success=False, latency_ms=0.0, error=str(e)) + + async def _check_http_health(self, server: ManagedMCPServer) -> HealthCheckResult: + """ + Health check for HTTP servers using GET request. + + Args: + server: The managed MCP server to check + + Returns: + HealthCheckResult with check results + """ + # HTTP servers use the same check as SSE servers + return await self._check_sse_health(server) + + async def _check_stdio_health(self, server: ManagedMCPServer) -> HealthCheckResult: + """ + Health check for stdio servers using ping command. 
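+
+        In the current implementation this is a lightweight check: it verifies
+        that the server instance can be created and that the configured command
+        exists on PATH, rather than sending an actual MCP ping message.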
+ + Args: + server: The managed MCP server to check + + Returns: + HealthCheckResult with check results + """ + try: + # Get the pydantic server instance + server.get_pydantic_server() + + # Try to get available tools as a health check + # This requires the server to be responsive + try: + # Attempt to list tools - this is a good health check for MCP servers + # Note: This is a simplified check. In a real implementation, + # we'd need to send an actual MCP message + + # For now, we'll check if we can create the server instance + # and if it appears to be configured correctly + config = server.config.config + command = config.get("command") + + if not command: + return HealthCheckResult( + success=False, + latency_ms=0.0, + error="No command configured for stdio server", + ) + + # Basic validation that command exists + import shutil + + if not shutil.which(command): + return HealthCheckResult( + success=False, + latency_ms=0.0, + error=f"Command '{command}' not found in PATH", + ) + + # If we get here, basic checks passed + return HealthCheckResult(success=True, latency_ms=0.0, error=None) + + except Exception as e: + return HealthCheckResult( + success=False, + latency_ms=0.0, + error=f"Server communication failed: {str(e)}", + ) + + except Exception as e: + return HealthCheckResult(success=False, latency_ms=0.0, error=str(e)) + + async def shutdown(self) -> None: + """ + Shutdown all monitoring tasks gracefully. + """ + logger.info("Shutting down health monitor") + + # Cancel all monitoring tasks + tasks = list(self.monitoring_tasks.values()) + for task in tasks: + task.cancel() + + # Wait for all tasks to complete + if tasks: + await asyncio.gather(*tasks, return_exceptions=True) + + self.monitoring_tasks.clear() + self.consecutive_failures.clear() + self.last_check_time.clear() + + logger.info("Health monitor shutdown complete") diff --git a/code_puppy/mcp_/managed_server.py b/code_puppy/mcp_/managed_server.py new file mode 100644 index 00000000..6448e7c3 --- /dev/null +++ b/code_puppy/mcp_/managed_server.py @@ -0,0 +1,402 @@ +""" +ManagedMCPServer wrapper class implementation. + +This module provides a managed wrapper around pydantic-ai MCP server classes +that adds management capabilities while maintaining 100% compatibility. 
+""" + +import json +import logging +import uuid +from dataclasses import dataclass, field +from datetime import datetime, timedelta +from enum import Enum +from typing import Any, Dict, Optional, Union + +import httpx +from pydantic_ai import RunContext +from pydantic_ai.mcp import ( + CallToolFunc, + MCPServerSSE, + MCPServerStdio, + MCPServerStreamableHTTP, + ToolResult, +) + +from code_puppy.http_utils import create_async_client +from code_puppy.mcp_.blocking_startup import BlockingMCPServerStdio +from code_puppy.messaging import emit_info + +# Configure logging +logger = logging.getLogger(__name__) + + +class ServerState(Enum): + """Enumeration of possible server states.""" + + STOPPED = "stopped" + STARTING = "starting" + RUNNING = "running" + STOPPING = "stopping" + ERROR = "error" + QUARANTINED = "quarantined" + + +@dataclass +class ServerConfig: + """Configuration for an MCP server.""" + + id: str + name: str + type: str # "sse", "stdio", or "http" + enabled: bool = True + config: Dict = field(default_factory=dict) # Raw config from JSON + + +async def process_tool_call( + ctx: RunContext[Any], + call_tool: CallToolFunc, + name: str, + tool_args: dict[str, Any], +) -> ToolResult: + """A tool call processor that passes along the deps.""" + group_id = uuid.uuid4() + emit_info( + f"\n[bold white on purple] MCP Tool Call - {name}[/bold white on purple]", + message_group=group_id, + ) + emit_info("\nArgs:", message_group=group_id) + emit_info(json.dumps(tool_args, indent=2), message_group=group_id) + return await call_tool(name, tool_args, {"deps": ctx.deps}) + + +class ManagedMCPServer: + """ + Managed wrapper around pydantic-ai MCP server classes. + + This class provides management capabilities like enable/disable, + quarantine, and status tracking while maintaining 100% compatibility + with the existing Agent interface through get_pydantic_server(). + + Example usage: + config = ServerConfig( + id="123", + name="test", + type="sse", + config={"url": "http://localhost:8080"} + ) + managed = ManagedMCPServer(config) + pydantic_server = managed.get_pydantic_server() # Returns actual MCPServerSSE + """ + + def __init__(self, server_config: ServerConfig): + """ + Initialize managed server with configuration. + + Args: + server_config: Server configuration containing type, connection details, etc. + """ + self.config = server_config + self._pydantic_server: Optional[ + Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP] + ] = None + self._state = ServerState.STOPPED + # Always start disabled - servers must be explicitly started with /mcp start + self._enabled = False + self._quarantine_until: Optional[datetime] = None + self._start_time: Optional[datetime] = None + self._stop_time: Optional[datetime] = None + self._error_message: Optional[str] = None + + # Initialize the pydantic server + try: + self._create_server() + # Always start as STOPPED - servers must be explicitly started + self._state = ServerState.STOPPED + except Exception as e: + logger.error(f"Failed to create server {self.config.name}: {e}") + self._state = ServerState.ERROR + self._error_message = str(e) + + def get_pydantic_server( + self, + ) -> Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP]: + """ + Get the actual pydantic-ai server instance. + + This method returns the real pydantic-ai MCP server objects for 100% compatibility + with the existing Agent interface. Do not return custom classes or proxies. 
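+
+        A minimal illustrative call:
+
+            managed = ManagedMCPServer(config)
+            managed.enable()
+            server = managed.get_pydantic_server()  # e.g. an MCPServerSSE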
+ + Returns: + Actual pydantic-ai MCP server instance (MCPServerSSE, MCPServerStdio, or MCPServerStreamableHTTP) + + Raises: + RuntimeError: If server creation failed or server is not available + """ + if self._pydantic_server is None: + raise RuntimeError(f"Server {self.config.name} is not available") + + if not self.is_enabled() or self.is_quarantined(): + raise RuntimeError(f"Server {self.config.name} is disabled or quarantined") + + return self._pydantic_server + + def _create_server(self) -> None: + """ + Create appropriate pydantic-ai server based on config type. + + Raises: + ValueError: If server type is unsupported or config is invalid + Exception: If server creation fails + """ + server_type = self.config.type.lower() + config = self.config.config + + try: + if server_type == "sse": + if "url" not in config: + raise ValueError("SSE server requires 'url' in config") + + # Prepare arguments for MCPServerSSE + sse_kwargs = { + "url": config["url"], + } + + # Add optional parameters if provided + if "timeout" in config: + sse_kwargs["timeout"] = config["timeout"] + if "read_timeout" in config: + sse_kwargs["read_timeout"] = config["read_timeout"] + if "http_client" in config: + sse_kwargs["http_client"] = config["http_client"] + elif config.get("headers"): + # Create HTTP client if headers are provided but no client specified + sse_kwargs["http_client"] = self._get_http_client() + + self._pydantic_server = MCPServerSSE( + **sse_kwargs, process_tool_call=process_tool_call + ) + + elif server_type == "stdio": + if "command" not in config: + raise ValueError("Stdio server requires 'command' in config") + + # Handle command and arguments + command = config["command"] + args = config.get("args", []) + if isinstance(args, str): + # If args is a string, split it + args = args.split() + + # Prepare arguments for MCPServerStdio + stdio_kwargs = {"command": command, "args": list(args) if args else []} + + # Add optional parameters if provided + if "env" in config: + stdio_kwargs["env"] = config["env"] + if "cwd" in config: + stdio_kwargs["cwd"] = config["cwd"] + if "timeout" in config: + stdio_kwargs["timeout"] = config["timeout"] + if "read_timeout" in config: + stdio_kwargs["read_timeout"] = config["read_timeout"] + + # Use BlockingMCPServerStdio for proper initialization blocking and stderr capture + # Create a unique message group for this server + message_group = uuid.uuid4() + self._pydantic_server = BlockingMCPServerStdio( + **stdio_kwargs, + process_tool_call=process_tool_call, + tool_prefix=config["name"], + emit_stderr=True, # Always emit stderr for now + message_group=message_group, + ) + + elif server_type == "http": + if "url" not in config: + raise ValueError("HTTP server requires 'url' in config") + + # Prepare arguments for MCPServerStreamableHTTP + http_kwargs = { + "url": config["url"], + } + + # Add optional parameters if provided + if "timeout" in config: + http_kwargs["timeout"] = config["timeout"] + if "read_timeout" in config: + http_kwargs["read_timeout"] = config["read_timeout"] + if "headers" in config: + http_kwargs["headers"] = config.get("headers") + # Create HTTP client if headers are provided but no client specified + + self._pydantic_server = MCPServerStreamableHTTP( + **http_kwargs, process_tool_call=process_tool_call + ) + + else: + raise ValueError(f"Unsupported server type: {server_type}") + + logger.info(f"Created {server_type} server: {self.config.name}") + + except Exception as e: + logger.error( + f"Failed to create {server_type} server 
{self.config.name}: {e}" + ) + raise + + def _get_http_client(self) -> httpx.AsyncClient: + """ + Create httpx.AsyncClient with headers from config. + + Returns: + Configured async HTTP client with custom headers + """ + headers = self.config.config.get("headers", {}) + timeout = self.config.config.get("timeout", 30) + client = create_async_client(headers=headers, timeout=timeout) + return client + + def enable(self) -> None: + """Enable server availability.""" + self._enabled = True + if self._state == ServerState.STOPPED and self._pydantic_server is not None: + self._state = ServerState.RUNNING + self._start_time = datetime.now() + logger.info(f"Enabled server: {self.config.name}") + + def disable(self) -> None: + """Disable server availability.""" + self._enabled = False + if self._state == ServerState.RUNNING: + self._state = ServerState.STOPPED + self._stop_time = datetime.now() + logger.info(f"Disabled server: {self.config.name}") + + def is_enabled(self) -> bool: + """ + Check if server is enabled. + + Returns: + True if server is enabled, False otherwise + """ + return self._enabled + + def quarantine(self, duration: int) -> None: + """ + Temporarily disable server for specified duration. + + Args: + duration: Quarantine duration in seconds + """ + self._quarantine_until = datetime.now() + timedelta(seconds=duration) + previous_state = self._state + self._state = ServerState.QUARANTINED + logger.warning( + f"Quarantined server {self.config.name} for {duration} seconds " + f"(was {previous_state.value})" + ) + + def is_quarantined(self) -> bool: + """ + Check if server is currently quarantined. + + Returns: + True if server is quarantined, False otherwise + """ + if self._quarantine_until is None: + return False + + if datetime.now() >= self._quarantine_until: + # Quarantine period has expired + self._quarantine_until = None + if self._state == ServerState.QUARANTINED: + # Restore to running state if enabled + self._state = ( + ServerState.RUNNING if self._enabled else ServerState.STOPPED + ) + logger.info(f"Released quarantine for server: {self.config.name}") + return False + + return True + + def get_captured_stderr(self) -> list[str]: + """ + Get captured stderr output if this is a stdio server. + + Returns: + List of captured stderr lines, or empty list if not applicable + """ + if isinstance(self._pydantic_server, BlockingMCPServerStdio): + return self._pydantic_server.get_captured_stderr() + return [] + + async def wait_until_ready(self, timeout: float = 30.0) -> bool: + """ + Wait until the server is ready. + + Args: + timeout: Maximum time to wait in seconds + + Returns: + True if server is ready, False otherwise + """ + if isinstance(self._pydantic_server, BlockingMCPServerStdio): + try: + await self._pydantic_server.wait_until_ready(timeout) + return True + except Exception: + return False + # Non-stdio servers are considered ready immediately + return True + + async def ensure_ready(self, timeout: float = 30.0): + """ + Ensure server is ready, raising exception if not. + + Args: + timeout: Maximum time to wait in seconds + + Raises: + TimeoutError: If server doesn't initialize within timeout + Exception: If server initialization failed + """ + if isinstance(self._pydantic_server, BlockingMCPServerStdio): + await self._pydantic_server.ensure_ready(timeout) + + def get_status(self) -> Dict[str, Any]: + """ + Return current status information. 
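+
+        An abridged example of the returned mapping (values are illustrative):
+
+            {
+                "id": "123", "name": "filesystem", "type": "stdio",
+                "state": "running", "enabled": True, "quarantined": False,
+                "uptime_seconds": 42.0, "server_available": True,
+                ...
+            }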
+ + Returns: + Dictionary containing comprehensive status information + """ + now = datetime.now() + uptime = None + if self._start_time and self._state == ServerState.RUNNING: + uptime = (now - self._start_time).total_seconds() + + quarantine_remaining = None + if self.is_quarantined(): + quarantine_remaining = (self._quarantine_until - now).total_seconds() + + return { + "id": self.config.id, + "name": self.config.name, + "type": self.config.type, + "state": self._state.value, + "enabled": self._enabled, + "quarantined": self.is_quarantined(), + "quarantine_remaining_seconds": quarantine_remaining, + "uptime_seconds": uptime, + "start_time": self._start_time.isoformat() if self._start_time else None, + "stop_time": self._stop_time.isoformat() if self._stop_time else None, + "error_message": self._error_message, + "config": self.config.config.copy(), # Copy to prevent modification + "server_available": ( + self._pydantic_server is not None + and self._enabled + and not self.is_quarantined() + and self._state == ServerState.RUNNING + ), + } diff --git a/code_puppy/mcp_/manager.py b/code_puppy/mcp_/manager.py new file mode 100644 index 00000000..5d085693 --- /dev/null +++ b/code_puppy/mcp_/manager.py @@ -0,0 +1,713 @@ +""" +MCPManager - Central coordinator for all MCP server operations. + +This module provides the main MCPManager class that coordinates all MCP server +operations while maintaining pydantic-ai compatibility. It serves as the central +point for managing servers, registering configurations, and providing servers +to agents. +""" + +import asyncio +import logging +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Dict, List, Optional, Union + +from pydantic_ai.mcp import MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP + +from .async_lifecycle import get_lifecycle_manager +from .managed_server import ManagedMCPServer, ServerConfig, ServerState +from .registry import ServerRegistry +from .status_tracker import ServerStatusTracker + +# Configure logging +logger = logging.getLogger(__name__) + + +@dataclass +class ServerInfo: + """Information about a registered server.""" + + id: str + name: str + type: str + enabled: bool + state: ServerState + quarantined: bool + uptime_seconds: Optional[float] + error_message: Optional[str] + health: Optional[Dict[str, Any]] = None + start_time: Optional[datetime] = None + latency_ms: Optional[float] = None + + +class MCPManager: + """ + Central coordinator for all MCP server operations. + + This class manages the lifecycle of MCP servers while maintaining + 100% pydantic-ai compatibility. It coordinates between the registry, + status tracker, and managed servers to provide a unified interface + for server management. + + The critical method get_servers_for_agent() returns actual pydantic-ai + server instances for use with Agent objects. 
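+
+    Typical lifecycle, as a rough sketch (registration details are shown in
+    the example below): register a configuration, start the server, hand the
+    live instances to an agent, then stop it when finished:
+
+        server_id = manager.register_server(config)
+        await manager.start_server(server_id)
+        servers = manager.get_servers_for_agent()
+        await manager.stop_server(server_id)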
+ + Example usage: + manager = get_mcp_manager() + + # Register a server + config = ServerConfig( + id="", # Auto-generated + name="filesystem", + type="stdio", + config={"command": "npx", "args": ["-y", "@modelcontextprotocol/server-filesystem"]} + ) + server_id = manager.register_server(config) + + # Get servers for agent use + servers = manager.get_servers_for_agent() # Returns actual pydantic-ai instances + """ + + def __init__(self): + """Initialize the MCP manager with all required components.""" + # Initialize core components + self.registry = ServerRegistry() + self.status_tracker = ServerStatusTracker() + + # Active managed servers (server_id -> ManagedMCPServer) + self._managed_servers: Dict[str, ManagedMCPServer] = {} + + # Load existing servers from registry + self._initialize_servers() + + logger.info("MCPManager initialized with core components") + + def _initialize_servers(self) -> None: + """Initialize managed servers from registry configurations.""" + configs = self.registry.list_all() + initialized_count = 0 + + for config in configs: + try: + managed_server = ManagedMCPServer(config) + self._managed_servers[config.id] = managed_server + + # Update status tracker - always start as STOPPED + # Servers must be explicitly started with /mcp start + self.status_tracker.set_status(config.id, ServerState.STOPPED) + + initialized_count += 1 + logger.debug( + f"Initialized managed server: {config.name} (ID: {config.id})" + ) + + except Exception as e: + logger.error(f"Failed to initialize server {config.name}: {e}") + # Update status tracker with error state + self.status_tracker.set_status(config.id, ServerState.ERROR) + self.status_tracker.record_event( + config.id, + "initialization_error", + {"error": str(e), "message": f"Failed to initialize: {e}"}, + ) + + logger.info(f"Initialized {initialized_count} servers from registry") + + def register_server(self, config: ServerConfig) -> str: + """ + Register a new server configuration. + + Args: + config: Server configuration to register + + Returns: + Server ID of the registered server + + Raises: + ValueError: If configuration is invalid or server already exists + Exception: If server initialization fails + """ + # Register with registry (validates config and assigns ID) + server_id = self.registry.register(config) + + try: + # Create managed server instance + managed_server = ManagedMCPServer(config) + self._managed_servers[server_id] = managed_server + + # Update status tracker - always start as STOPPED + # Servers must be explicitly started with /mcp start + self.status_tracker.set_status(server_id, ServerState.STOPPED) + + # Record registration event + self.status_tracker.record_event( + server_id, + "registered", + { + "name": config.name, + "type": config.type, + "message": "Server registered successfully", + }, + ) + + logger.info( + f"Successfully registered server: {config.name} (ID: {server_id})" + ) + return server_id + + except Exception as e: + # Remove from registry if initialization failed + self.registry.unregister(server_id) + logger.error(f"Failed to initialize registered server {config.name}: {e}") + raise + + def get_servers_for_agent( + self, + ) -> List[Union[MCPServerSSE, MCPServerStdio, MCPServerStreamableHTTP]]: + """ + Get pydantic-ai compatible servers for agent use. + + This is the critical method that must return actual pydantic-ai server + instances (not wrappers). Only returns enabled, non-quarantined servers. + Handles errors gracefully by logging but not crashing. 
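+
+        A rough sketch of passing the result to pydantic-ai (the model name is
+        illustrative, and the exact Agent keyword, e.g. mcp_servers or
+        toolsets, depends on the installed pydantic-ai version):
+
+            from pydantic_ai import Agent
+
+            agent = Agent(
+                "openai:gpt-4.1",
+                mcp_servers=get_mcp_manager().get_servers_for_agent(),
+            )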
+ + Returns: + List of actual pydantic-ai MCP server instances ready for use + """ + servers = [] + + for server_id, managed_server in self._managed_servers.items(): + try: + # Only include enabled, non-quarantined servers + if managed_server.is_enabled() and not managed_server.is_quarantined(): + # Get the actual pydantic-ai server instance + pydantic_server = managed_server.get_pydantic_server() + servers.append(pydantic_server) + + logger.debug( + f"Added server to agent list: {managed_server.config.name}" + ) + else: + logger.debug( + f"Skipping server {managed_server.config.name}: " + f"enabled={managed_server.is_enabled()}, " + f"quarantined={managed_server.is_quarantined()}" + ) + + except Exception as e: + # Log error but don't crash - continue with other servers + logger.error( + f"Error getting server {managed_server.config.name} for agent: {e}" + ) + # Record error event + self.status_tracker.record_event( + server_id, + "agent_access_error", + { + "error": str(e), + "message": f"Error accessing server for agent: {e}", + }, + ) + continue + + logger.debug(f"Returning {len(servers)} servers for agent use") + return servers + + def get_server(self, server_id: str) -> Optional[ManagedMCPServer]: + """ + Get managed server by ID. + + Args: + server_id: ID of server to retrieve + + Returns: + ManagedMCPServer instance if found, None otherwise + """ + return self._managed_servers.get(server_id) + + def get_server_by_name(self, name: str) -> Optional[ServerConfig]: + """ + Get server configuration by name. + + Args: + name: Name of server to retrieve + + Returns: + ServerConfig if found, None otherwise + """ + return self.registry.get_by_name(name) + + def update_server(self, server_id: str, config: ServerConfig) -> bool: + """ + Update server configuration. + + Args: + server_id: ID of server to update + config: New configuration + + Returns: + True if server was updated, False if not found + """ + # Update in registry + if not self.registry.update(server_id, config): + return False + + # Update managed server if it exists + managed_server = self._managed_servers.get(server_id) + if managed_server: + managed_server.config = config + # Clear cached server to force recreation on next use + managed_server.server = None + logger.info(f"Updated server configuration: {config.name}") + + return True + + def list_servers(self) -> List[ServerInfo]: + """ + Get information about all registered servers. 
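+
+        Rough usage sketch (fields as defined on ServerInfo above):
+
+            for info in manager.list_servers():
+                print(info.name, info.state.value, info.enabled)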
+ + Returns: + List of ServerInfo objects with current status + """ + server_infos = [] + + for server_id, managed_server in self._managed_servers.items(): + try: + status = managed_server.get_status() + uptime = self.status_tracker.get_uptime(server_id) + summary = self.status_tracker.get_server_summary(server_id) + + # Get health information from metadata + health_info = self.status_tracker.get_metadata(server_id, "health") + if health_info is None: + # Create basic health info based on state + health_info = { + "is_healthy": status["state"] == "running", + "error": status.get("error_message"), + } + + # Get latency from metadata + latency_ms = self.status_tracker.get_metadata(server_id, "latency_ms") + + server_info = ServerInfo( + id=server_id, + name=managed_server.config.name, + type=managed_server.config.type, + enabled=managed_server.is_enabled(), + state=ServerState(status["state"]), + quarantined=managed_server.is_quarantined(), + uptime_seconds=uptime.total_seconds() if uptime else None, + error_message=status.get("error_message"), + health=health_info, + start_time=summary.get("start_time"), + latency_ms=latency_ms, + ) + + server_infos.append(server_info) + + except Exception as e: + logger.error(f"Error getting info for server {server_id}: {e}") + # Create error info + config = self.registry.get(server_id) + if config: + server_info = ServerInfo( + id=server_id, + name=config.name, + type=config.type, + enabled=False, + state=ServerState.ERROR, + quarantined=False, + uptime_seconds=None, + error_message=str(e), + health={"is_healthy": False, "error": str(e)}, + start_time=None, + latency_ms=None, + ) + server_infos.append(server_info) + + return server_infos + + async def start_server(self, server_id: str) -> bool: + """ + Start a server (enable it and start the subprocess/connection). + + This both enables the server for agent use AND starts the actual process. + For stdio servers, this starts the subprocess. + For SSE/HTTP servers, this establishes the connection. 
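+
+        Sketch of use from an async context (server_id as returned by
+        register_server):
+
+            if await manager.start_server(server_id):
+                print(manager.get_server_status(server_id)["state"])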
+ + Args: + server_id: ID of server to start + + Returns: + True if server was started, False if not found or failed + """ + managed_server = self._managed_servers.get(server_id) + if managed_server is None: + logger.warning(f"Attempted to start non-existent server: {server_id}") + return False + + try: + # First enable the server + managed_server.enable() + self.status_tracker.set_status(server_id, ServerState.RUNNING) + self.status_tracker.record_start_time(server_id) + + # Try to actually start it if we have an async context + try: + # Get the pydantic-ai server instance + pydantic_server = managed_server.get_pydantic_server() + + # Start the server using the async lifecycle manager + lifecycle_mgr = get_lifecycle_manager() + started = await lifecycle_mgr.start_server(server_id, pydantic_server) + + if started: + logger.info( + f"Started server process: {managed_server.config.name} (ID: {server_id})" + ) + self.status_tracker.record_event( + server_id, + "started", + {"message": "Server started and process running"}, + ) + else: + logger.warning( + f"Could not start process for server {server_id}, but it's enabled" + ) + self.status_tracker.record_event( + server_id, + "enabled", + {"message": "Server enabled (process will start when used)"}, + ) + except Exception as e: + # Process start failed, but server is still enabled + logger.warning(f"Could not start process for server {server_id}: {e}") + self.status_tracker.record_event( + server_id, + "enabled", + {"message": "Server enabled (process will start when used)"}, + ) + + return True + + except Exception as e: + logger.error(f"Failed to start server {server_id}: {e}") + self.status_tracker.set_status(server_id, ServerState.ERROR) + self.status_tracker.record_event( + server_id, + "start_error", + {"error": str(e), "message": f"Error starting server: {e}"}, + ) + return False + + def start_server_sync(self, server_id: str) -> bool: + """ + Synchronous wrapper for start_server. + """ + try: + asyncio.get_running_loop() + # We're in an async context, but we need to wait for completion + # Create a future and schedule the coroutine + + # Use run_in_executor to run the async function synchronously + async def run_async(): + return await self.start_server(server_id) + + # Schedule the task and wait briefly for it to complete + task = asyncio.create_task(run_async()) + + # Give it a moment to complete - this fixes the race condition + import time + + time.sleep(0.1) # Small delay to let async tasks progress + + # Check if task completed, if not, fall back to sync enable + if task.done(): + try: + result = task.result() + return result + except Exception: + pass + + # If async didn't complete, enable synchronously + managed_server = self._managed_servers.get(server_id) + if managed_server: + managed_server.enable() + self.status_tracker.set_status(server_id, ServerState.RUNNING) + self.status_tracker.record_start_time(server_id) + logger.info(f"Enabled server synchronously: {server_id}") + return True + return False + + except RuntimeError: + # No async loop, just enable the server + managed_server = self._managed_servers.get(server_id) + if managed_server: + managed_server.enable() + self.status_tracker.set_status(server_id, ServerState.RUNNING) + self.status_tracker.record_start_time(server_id) + logger.info(f"Enabled server (no async context): {server_id}") + return True + return False + + async def stop_server(self, server_id: str) -> bool: + """ + Stop a server (disable it and stop the subprocess/connection). 
+ + This both disables the server AND stops any running process. + For stdio servers, this stops the subprocess. + For SSE/HTTP servers, this closes the connection. + + Args: + server_id: ID of server to stop + + Returns: + True if server was stopped, False if not found + """ + managed_server = self._managed_servers.get(server_id) + if managed_server is None: + logger.warning(f"Attempted to stop non-existent server: {server_id}") + return False + + try: + # First disable the server + managed_server.disable() + self.status_tracker.set_status(server_id, ServerState.STOPPED) + self.status_tracker.record_stop_time(server_id) + + # Try to actually stop it if we have an async context + try: + # Stop the server using the async lifecycle manager + lifecycle_mgr = get_lifecycle_manager() + stopped = await lifecycle_mgr.stop_server(server_id) + + if stopped: + logger.info( + f"Stopped server process: {managed_server.config.name} (ID: {server_id})" + ) + self.status_tracker.record_event( + server_id, + "stopped", + {"message": "Server stopped and process terminated"}, + ) + else: + logger.info(f"Server {server_id} disabled (no process was running)") + self.status_tracker.record_event( + server_id, "disabled", {"message": "Server disabled"} + ) + except Exception as e: + # Process stop failed, but server is still disabled + logger.warning(f"Could not stop process for server {server_id}: {e}") + self.status_tracker.record_event( + server_id, "disabled", {"message": "Server disabled"} + ) + + return True + + except Exception as e: + logger.error(f"Failed to stop server {server_id}: {e}") + self.status_tracker.record_event( + server_id, + "stop_error", + {"error": str(e), "message": f"Error stopping server: {e}"}, + ) + return False + + def stop_server_sync(self, server_id: str) -> bool: + """ + Synchronous wrapper for stop_server. + """ + try: + asyncio.get_running_loop() + + # We're in an async context, but we need to wait for completion + async def run_async(): + return await self.stop_server(server_id) + + # Schedule the task and wait briefly for it to complete + task = asyncio.create_task(run_async()) + + # Give it a moment to complete - this fixes the race condition + import time + + time.sleep(0.1) # Small delay to let async tasks progress + + # Check if task completed, if not, fall back to sync disable + if task.done(): + try: + result = task.result() + return result + except Exception: + pass + + # If async didn't complete, disable synchronously + managed_server = self._managed_servers.get(server_id) + if managed_server: + managed_server.disable() + self.status_tracker.set_status(server_id, ServerState.STOPPED) + self.status_tracker.record_stop_time(server_id) + logger.info(f"Disabled server synchronously: {server_id}") + return True + return False + + except RuntimeError: + # No async loop, just disable the server + managed_server = self._managed_servers.get(server_id) + if managed_server: + managed_server.disable() + self.status_tracker.set_status(server_id, ServerState.STOPPED) + self.status_tracker.record_stop_time(server_id) + logger.info(f"Disabled server (no async context): {server_id}") + return True + return False + + def reload_server(self, server_id: str) -> bool: + """ + Reload a server configuration. 
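+
+        One way to apply a changed configuration (a sketch; new_config is a
+        ServerConfig prepared by the caller): update the registry entry, then
+        reload so a fresh ManagedMCPServer is built from it:
+
+            manager.update_server(server_id, new_config)
+            manager.reload_server(server_id)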
+ + Args: + server_id: ID of server to reload + + Returns: + True if server was reloaded, False if not found or failed + """ + config = self.registry.get(server_id) + if config is None: + logger.warning(f"Attempted to reload non-existent server: {server_id}") + return False + + try: + # Remove old managed server + if server_id in self._managed_servers: + old_server = self._managed_servers[server_id] + logger.debug(f"Removing old server instance: {old_server.config.name}") + del self._managed_servers[server_id] + + # Create new managed server + managed_server = ManagedMCPServer(config) + self._managed_servers[server_id] = managed_server + + # Update status tracker - always start as STOPPED + # Servers must be explicitly started with /mcp start + self.status_tracker.set_status(server_id, ServerState.STOPPED) + + # Record reload event + self.status_tracker.record_event( + server_id, "reloaded", {"message": "Server configuration reloaded"} + ) + + logger.info(f"Reloaded server: {config.name} (ID: {server_id})") + return True + + except Exception as e: + logger.error(f"Failed to reload server {server_id}: {e}") + self.status_tracker.set_status(server_id, ServerState.ERROR) + self.status_tracker.record_event( + server_id, + "reload_error", + {"error": str(e), "message": f"Error reloading server: {e}"}, + ) + return False + + def remove_server(self, server_id: str) -> bool: + """ + Remove a server completely. + + Args: + server_id: ID of server to remove + + Returns: + True if server was removed, False if not found + """ + # Get server name for logging + config = self.registry.get(server_id) + server_name = config.name if config else server_id + + # Remove from registry + registry_removed = self.registry.unregister(server_id) + + # Remove from managed servers + managed_removed = False + if server_id in self._managed_servers: + del self._managed_servers[server_id] + managed_removed = True + + # Record removal event if server existed + if registry_removed or managed_removed: + self.status_tracker.record_event( + server_id, "removed", {"message": "Server removed"} + ) + logger.info(f"Removed server: {server_name} (ID: {server_id})") + return True + else: + logger.warning(f"Attempted to remove non-existent server: {server_id}") + return False + + def get_server_status(self, server_id: str) -> Dict[str, Any]: + """ + Get comprehensive status for a server. 
+ + Args: + server_id: ID of server to get status for + + Returns: + Dictionary containing comprehensive status information + """ + # Get basic status from managed server + managed_server = self._managed_servers.get(server_id) + if managed_server is None: + return { + "server_id": server_id, + "exists": False, + "error": "Server not found", + } + + try: + # Get status from managed server + status = managed_server.get_status() + + # Add status tracker information + tracker_summary = self.status_tracker.get_server_summary(server_id) + recent_events = self.status_tracker.get_events(server_id, limit=5) + + # Combine all information + comprehensive_status = { + **status, # Include all managed server status + "tracker_state": tracker_summary["state"], + "tracker_metadata": tracker_summary["metadata"], + "recent_events_count": tracker_summary["recent_events_count"], + "tracker_uptime": tracker_summary["uptime"], + "last_event_time": tracker_summary["last_event_time"], + "recent_events": [ + { + "timestamp": event.timestamp.isoformat(), + "event_type": event.event_type, + "details": event.details, + } + for event in recent_events + ], + } + + return comprehensive_status + + except Exception as e: + logger.error(f"Error getting status for server {server_id}: {e}") + return {"server_id": server_id, "exists": True, "error": str(e)} + + +# Singleton instance +_manager_instance: Optional[MCPManager] = None + + +def get_mcp_manager() -> MCPManager: + """ + Get the singleton MCPManager instance. + + Returns: + The global MCPManager instance + """ + global _manager_instance + if _manager_instance is None: + _manager_instance = MCPManager() + return _manager_instance diff --git a/code_puppy/mcp_/registry.py b/code_puppy/mcp_/registry.py new file mode 100644 index 00000000..d84af388 --- /dev/null +++ b/code_puppy/mcp_/registry.py @@ -0,0 +1,450 @@ +""" +ServerRegistry implementation for managing MCP server configurations. + +This module provides a registry that tracks all MCP server configurations +and provides thread-safe CRUD operations with JSON persistence. +""" + +import json +import logging +import threading +import uuid +from pathlib import Path +from typing import Dict, List, Optional + +from .managed_server import ServerConfig + +# Configure logging +logger = logging.getLogger(__name__) + + +class ServerRegistry: + """ + Registry for managing MCP server configurations. + + Provides CRUD operations for server configurations with thread-safe access, + validation, and persistent storage to ~/.code_puppy/mcp_registry.json. + + All operations are thread-safe and use JSON serialization for ServerConfig objects. + Handles file not existing gracefully and validates configurations according to + server type requirements. + """ + + def __init__(self, storage_path: Optional[str] = None): + """ + Initialize the server registry. + + Args: + storage_path: Optional custom path for registry storage. 
+ Defaults to ~/.code_puppy/mcp_registry.json + """ + if storage_path is None: + home_dir = Path.home() + code_puppy_dir = home_dir / ".code_puppy" + code_puppy_dir.mkdir(exist_ok=True) + self._storage_path = code_puppy_dir / "mcp_registry.json" + else: + self._storage_path = Path(storage_path) + + # Thread safety lock (reentrant) + self._lock = threading.RLock() + + # In-memory storage: server_id -> ServerConfig + self._servers: Dict[str, ServerConfig] = {} + + # Load existing configurations + self._load() + + logger.info(f"Initialized ServerRegistry with storage at {self._storage_path}") + + def register(self, config: ServerConfig) -> str: + """ + Add new server configuration. + + Args: + config: Server configuration to register + + Returns: + Server ID of the registered server + + Raises: + ValueError: If validation fails or server already exists + """ + with self._lock: + # Validate configuration + validation_errors = self.validate_config(config) + if validation_errors: + raise ValueError(f"Validation failed: {'; '.join(validation_errors)}") + + # Generate ID if not provided or ensure uniqueness + if not config.id: + config.id = str(uuid.uuid4()) + elif config.id in self._servers: + raise ValueError(f"Server with ID {config.id} already exists") + + # Check name uniqueness + existing_config = self.get_by_name(config.name) + if existing_config and existing_config.id != config.id: + raise ValueError(f"Server with name '{config.name}' already exists") + + # Store configuration + self._servers[config.id] = config + + # Persist to disk + self._persist() + + logger.info(f"Registered server: {config.name} (ID: {config.id})") + return config.id + + def unregister(self, server_id: str) -> bool: + """ + Remove server configuration. + + Args: + server_id: ID of server to remove + + Returns: + True if server was removed, False if not found + """ + with self._lock: + if server_id not in self._servers: + logger.warning( + f"Attempted to unregister non-existent server: {server_id}" + ) + return False + + server_name = self._servers[server_id].name + del self._servers[server_id] + + # Persist to disk + self._persist() + + logger.info(f"Unregistered server: {server_name} (ID: {server_id})") + return True + + def get(self, server_id: str) -> Optional[ServerConfig]: + """ + Get server configuration by ID. + + Args: + server_id: ID of server to retrieve + + Returns: + ServerConfig if found, None otherwise + """ + with self._lock: + return self._servers.get(server_id) + + def get_by_name(self, name: str) -> Optional[ServerConfig]: + """ + Get server configuration by name. + + Args: + name: Name of server to retrieve + + Returns: + ServerConfig if found, None otherwise + """ + with self._lock: + for config in self._servers.values(): + if config.name == name: + return config + return None + + def list_all(self) -> List[ServerConfig]: + """ + Get all server configurations. + + Returns: + List of all ServerConfig objects + """ + with self._lock: + return list(self._servers.values()) + + def update(self, server_id: str, config: ServerConfig) -> bool: + """ + Update existing server configuration. 
+ + Args: + server_id: ID of server to update + config: New configuration + + Returns: + True if update succeeded, False if server not found + + Raises: + ValueError: If validation fails + """ + with self._lock: + if server_id not in self._servers: + logger.warning(f"Attempted to update non-existent server: {server_id}") + return False + + # Ensure the ID matches + config.id = server_id + + # Validate configuration + validation_errors = self.validate_config(config) + if validation_errors: + raise ValueError(f"Validation failed: {'; '.join(validation_errors)}") + + # Check name uniqueness (excluding current server) + existing_config = self.get_by_name(config.name) + if existing_config and existing_config.id != server_id: + raise ValueError(f"Server with name '{config.name}' already exists") + + # Update configuration + old_name = self._servers[server_id].name + self._servers[server_id] = config + + # Persist to disk + self._persist() + + logger.info( + f"Updated server: {old_name} -> {config.name} (ID: {server_id})" + ) + return True + + def exists(self, server_id: str) -> bool: + """ + Check if server exists. + + Args: + server_id: ID of server to check + + Returns: + True if server exists, False otherwise + """ + with self._lock: + return server_id in self._servers + + def validate_config(self, config: ServerConfig) -> List[str]: + """ + Validate server configuration. + + Args: + config: Configuration to validate + + Returns: + List of validation error messages (empty if valid) + """ + errors = [] + + # Basic validation + if not config.name or not config.name.strip(): + errors.append("Server name is required") + elif not config.name.replace("-", "").replace("_", "").isalnum(): + errors.append( + "Server name must be alphanumeric (hyphens and underscores allowed)" + ) + + if not config.type: + errors.append("Server type is required") + elif config.type.lower() not in ["sse", "stdio", "http"]: + errors.append("Server type must be one of: sse, stdio, http") + + if not isinstance(config.config, dict): + errors.append("Server config must be a dictionary") + return errors # Can't validate further without valid config dict + + # Type-specific validation + server_type = config.type.lower() + server_config = config.config + + if server_type in ["sse", "http"]: + if "url" not in server_config: + errors.append(f"{server_type.upper()} server requires 'url' in config") + elif ( + not isinstance(server_config["url"], str) + or not server_config["url"].strip() + ): + errors.append( + f"{server_type.upper()} server URL must be a non-empty string" + ) + elif not ( + server_config["url"].startswith("http://") + or server_config["url"].startswith("https://") + ): + errors.append( + f"{server_type.upper()} server URL must start with http:// or https://" + ) + + # Optional parameter validation + if "timeout" in server_config: + try: + timeout = float(server_config["timeout"]) + if timeout <= 0: + errors.append("Timeout must be positive") + except (ValueError, TypeError): + errors.append("Timeout must be a number") + + if "read_timeout" in server_config: + try: + read_timeout = float(server_config["read_timeout"]) + if read_timeout <= 0: + errors.append("Read timeout must be positive") + except (ValueError, TypeError): + errors.append("Read timeout must be a number") + + if "headers" in server_config: + if not isinstance(server_config["headers"], dict): + errors.append("Headers must be a dictionary") + + elif server_type == "stdio": + if "command" not in server_config: + errors.append("Stdio server requires 
'command' in config") + elif ( + not isinstance(server_config["command"], str) + or not server_config["command"].strip() + ): + errors.append("Stdio server command must be a non-empty string") + + # Optional parameter validation + if "args" in server_config: + args = server_config["args"] + if not isinstance(args, (list, str)): + errors.append("Args must be a list or string") + elif isinstance(args, list): + if not all(isinstance(arg, str) for arg in args): + errors.append("All args must be strings") + + if "env" in server_config: + if not isinstance(server_config["env"], dict): + errors.append("Environment variables must be a dictionary") + elif not all( + isinstance(k, str) and isinstance(v, str) + for k, v in server_config["env"].items() + ): + errors.append("All environment variables must be strings") + + if "cwd" in server_config: + if not isinstance(server_config["cwd"], str): + errors.append("Working directory must be a string") + + return errors + + def _persist(self) -> None: + """ + Save registry to disk. + + This method assumes it's called within a lock context. + + Raises: + Exception: If unable to write to storage file + """ + try: + # Convert ServerConfig objects to dictionaries for JSON serialization + data = {} + for server_id, config in self._servers.items(): + data[server_id] = { + "id": config.id, + "name": config.name, + "type": config.type, + "enabled": config.enabled, + "config": config.config, + } + + # Ensure directory exists + self._storage_path.parent.mkdir(parents=True, exist_ok=True) + + # Write to temporary file first, then rename (atomic operation) + temp_path = self._storage_path.with_suffix(".tmp") + with open(temp_path, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2, ensure_ascii=False) + + # Atomic rename + temp_path.replace(self._storage_path) + + logger.debug( + f"Persisted {len(self._servers)} server configurations to {self._storage_path}" + ) + + except Exception as e: + logger.error(f"Failed to persist server registry: {e}") + raise + + def _load(self) -> None: + """ + Load registry from disk. + + Handles file not existing gracefully by starting with empty registry. + Invalid entries are logged and skipped. 
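+
+        Expected on-disk shape, mirroring what _persist writes (values are
+        illustrative):
+
+            {
+              "<server-id>": {
+                "id": "<server-id>",
+                "name": "filesystem",
+                "type": "stdio",
+                "enabled": true,
+                "config": {
+                  "command": "npx",
+                  "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"]
+                }
+              }
+            }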
+ """ + try: + if not self._storage_path.exists(): + logger.info( + f"Registry file {self._storage_path} does not exist, starting with empty registry" + ) + return + + # Check if file is empty + if self._storage_path.stat().st_size == 0: + logger.info( + f"Registry file {self._storage_path} is empty, starting with empty registry" + ) + return + + with open(self._storage_path, "r", encoding="utf-8") as f: + data = json.load(f) + + if not isinstance(data, dict): + logger.warning( + f"Invalid registry format in {self._storage_path}, starting with empty registry" + ) + return + + # Load server configurations + loaded_count = 0 + for server_id, config_data in data.items(): + try: + # Validate the structure + if not isinstance(config_data, dict): + logger.warning( + f"Skipping invalid config for server {server_id}: not a dictionary" + ) + continue + + required_fields = ["id", "name", "type", "config"] + if not all(field in config_data for field in required_fields): + logger.warning( + f"Skipping incomplete config for server {server_id}: missing required fields" + ) + continue + + # Create ServerConfig object + config = ServerConfig( + id=config_data["id"], + name=config_data["name"], + type=config_data["type"], + enabled=config_data.get("enabled", True), + config=config_data["config"], + ) + + # Basic validation + validation_errors = self.validate_config(config) + if validation_errors: + logger.warning( + f"Skipping invalid config for server {server_id}: {'; '.join(validation_errors)}" + ) + continue + + # Store configuration + self._servers[server_id] = config + loaded_count += 1 + + except Exception as e: + logger.warning( + f"Skipping invalid config for server {server_id}: {e}" + ) + continue + + logger.info( + f"Loaded {loaded_count} server configurations from {self._storage_path}" + ) + + except json.JSONDecodeError as e: + logger.error(f"Invalid JSON in registry file {self._storage_path}: {e}") + logger.info("Starting with empty registry") + except Exception as e: + logger.error(f"Failed to load server registry: {e}") + logger.info("Starting with empty registry") diff --git a/code_puppy/mcp_/retry_manager.py b/code_puppy/mcp_/retry_manager.py new file mode 100644 index 00000000..d32cdf57 --- /dev/null +++ b/code_puppy/mcp_/retry_manager.py @@ -0,0 +1,324 @@ +""" +Retry manager for MCP server communication with various backoff strategies. + +This module provides retry logic for handling transient failures in MCP server +communication with intelligent backoff strategies to prevent overwhelming failed servers. 
+""" + +import asyncio +import logging +import random +from collections import defaultdict +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Callable, Dict, Optional + +import httpx + +logger = logging.getLogger(__name__) + + +@dataclass +class RetryStats: + """Statistics for retry operations per server.""" + + total_retries: int = 0 + successful_retries: int = 0 + failed_retries: int = 0 + average_attempts: float = 0.0 + last_retry: Optional[datetime] = None + + def calculate_average(self, new_attempts: int) -> None: + """Update the average attempts calculation.""" + if self.total_retries == 0: + self.average_attempts = float(new_attempts) + else: + # Calculate new average: (old_average * old_count + new_value) / new_count + total_attempts = (self.average_attempts * self.total_retries) + new_attempts + self.average_attempts = total_attempts / (self.total_retries + 1) + + +class RetryManager: + """ + Manages retry logic for MCP server operations with various backoff strategies. + + Supports different backoff strategies and intelligent retry decisions based on + error types. Tracks retry statistics per server for monitoring. + """ + + def __init__(self): + """Initialize the retry manager.""" + self._stats: Dict[str, RetryStats] = defaultdict(RetryStats) + self._lock = asyncio.Lock() + + async def retry_with_backoff( + self, + func: Callable, + max_attempts: int = 3, + strategy: str = "exponential", + server_id: str = "unknown", + ) -> Any: + """ + Execute a function with retry logic and backoff strategy. + + Args: + func: The async function to execute + max_attempts: Maximum number of retry attempts + strategy: Backoff strategy ('fixed', 'linear', 'exponential', 'exponential_jitter') + server_id: ID of the server for tracking stats + + Returns: + The result of the function call + + Raises: + The last exception encountered if all retries fail + """ + last_exception = None + + for attempt in range(max_attempts): + try: + result = await func() + + # Record successful retry if this wasn't the first attempt + if attempt > 0: + await self.record_retry(server_id, attempt + 1, success=True) + + return result + + except Exception as e: + last_exception = e + + # Check if this error is retryable + if not self.should_retry(e): + logger.info( + f"Non-retryable error for server {server_id}: {type(e).__name__}: {e}" + ) + await self.record_retry(server_id, attempt + 1, success=False) + raise e + + # If this is the last attempt, don't wait + if attempt == max_attempts - 1: + await self.record_retry(server_id, max_attempts, success=False) + break + + # Calculate backoff delay + delay = self.calculate_backoff(attempt + 1, strategy) + + logger.warning( + f"Attempt {attempt + 1}/{max_attempts} failed for server {server_id}: " + f"{type(e).__name__}: {e}. Retrying in {delay:.2f}s" + ) + + # Wait before retrying + await asyncio.sleep(delay) + + # All attempts failed + logger.error( + f"All {max_attempts} attempts failed for server {server_id}. " + f"Last error: {type(last_exception).__name__}: {last_exception}" + ) + raise last_exception + + def calculate_backoff(self, attempt: int, strategy: str) -> float: + """ + Calculate backoff delay based on attempt number and strategy. 
+ + Args: + attempt: The current attempt number (1-based) + strategy: The backoff strategy to use + + Returns: + Delay in seconds + """ + if strategy == "fixed": + return 1.0 + + elif strategy == "linear": + return float(attempt) + + elif strategy == "exponential": + return 2.0 ** (attempt - 1) + + elif strategy == "exponential_jitter": + base_delay = 2.0 ** (attempt - 1) + jitter = random.uniform(-0.25, 0.25) # ±25% jitter + return max(0.1, base_delay * (1 + jitter)) + + else: + logger.warning(f"Unknown backoff strategy: {strategy}, using exponential") + return 2.0 ** (attempt - 1) + + def should_retry(self, error: Exception) -> bool: + """ + Determine if an error is retryable. + + Args: + error: The exception to evaluate + + Returns: + True if the error is retryable, False otherwise + """ + # Network timeouts and connection errors are retryable + if isinstance(error, (asyncio.TimeoutError, ConnectionError, OSError)): + return True + + # HTTP errors + if isinstance(error, httpx.HTTPError): + if isinstance(error, httpx.TimeoutException): + return True + elif isinstance(error, httpx.ConnectError): + return True + elif isinstance(error, httpx.ReadError): + return True + elif hasattr(error, "response") and error.response is not None: + status_code = error.response.status_code + # 5xx server errors are retryable + if 500 <= status_code < 600: + return True + # Rate limit errors are retryable (with longer backoff) + if status_code == 429: + return True + # 4xx client errors are generally not retryable + # except for specific cases like 408 (timeout) + if status_code == 408: + return True + return False + + # JSON decode errors might be transient + if isinstance(error, ValueError) and "json" in str(error).lower(): + return True + + # Authentication and authorization errors are not retryable + error_str = str(error).lower() + if any( + term in error_str + for term in ["unauthorized", "forbidden", "authentication", "permission"] + ): + return False + + # Schema validation errors are not retryable + if "schema" in error_str or "validation" in error_str: + return False + + # By default, consider other errors as potentially retryable + # This is conservative but helps handle unknown transient issues + return True + + async def record_retry(self, server_id: str, attempts: int, success: bool) -> None: + """ + Record retry statistics for a server. + + Args: + server_id: ID of the server + attempts: Number of attempts made + success: Whether the retry was successful + """ + async with self._lock: + stats = self._stats[server_id] + stats.last_retry = datetime.now() + + if success: + stats.successful_retries += 1 + else: + stats.failed_retries += 1 + + stats.calculate_average(attempts) + stats.total_retries += 1 + + async def get_retry_stats(self, server_id: str) -> RetryStats: + """ + Get retry statistics for a server. + + Args: + server_id: ID of the server + + Returns: + RetryStats object with current statistics + """ + async with self._lock: + # Return a copy to avoid external modification + stats = self._stats[server_id] + return RetryStats( + total_retries=stats.total_retries, + successful_retries=stats.successful_retries, + failed_retries=stats.failed_retries, + average_attempts=stats.average_attempts, + last_retry=stats.last_retry, + ) + + async def get_all_stats(self) -> Dict[str, RetryStats]: + """ + Get retry statistics for all servers. 
+ + Returns: + Dictionary mapping server IDs to their retry statistics + """ + async with self._lock: + return { + server_id: RetryStats( + total_retries=stats.total_retries, + successful_retries=stats.successful_retries, + failed_retries=stats.failed_retries, + average_attempts=stats.average_attempts, + last_retry=stats.last_retry, + ) + for server_id, stats in self._stats.items() + } + + async def clear_stats(self, server_id: str) -> None: + """ + Clear retry statistics for a server. + + Args: + server_id: ID of the server + """ + async with self._lock: + if server_id in self._stats: + del self._stats[server_id] + + async def clear_all_stats(self) -> None: + """Clear retry statistics for all servers.""" + async with self._lock: + self._stats.clear() + + +# Global retry manager instance +_retry_manager_instance: Optional[RetryManager] = None + + +def get_retry_manager() -> RetryManager: + """ + Get the global retry manager instance (singleton pattern). + + Returns: + The global RetryManager instance + """ + global _retry_manager_instance + if _retry_manager_instance is None: + _retry_manager_instance = RetryManager() + return _retry_manager_instance + + +# Convenience function for common retry patterns +async def retry_mcp_call( + func: Callable, + server_id: str, + max_attempts: int = 3, + strategy: str = "exponential_jitter", +) -> Any: + """ + Convenience function for retrying MCP calls with sensible defaults. + + Args: + func: The async function to execute + server_id: ID of the server for tracking + max_attempts: Maximum retry attempts + strategy: Backoff strategy + + Returns: + The result of the function call + """ + retry_manager = get_retry_manager() + return await retry_manager.retry_with_backoff( + func=func, max_attempts=max_attempts, strategy=strategy, server_id=server_id + ) diff --git a/code_puppy/mcp_/server_registry_catalog.py b/code_puppy/mcp_/server_registry_catalog.py new file mode 100644 index 00000000..cc2b9029 --- /dev/null +++ b/code_puppy/mcp_/server_registry_catalog.py @@ -0,0 +1,1094 @@ +""" +MCP Server Registry Catalog - Pre-configured MCP servers. +A curated collection of MCP servers that can be easily searched and installed. 
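+
+A template can be looked up in MCP_SERVER_REGISTRY and turned into a concrete
+server config via MCPServerTemplate.to_server_config, for example (sketch):
+
+    template = next(t for t in MCP_SERVER_REGISTRY if t.id == "postgres")
+    config = template.to_server_config(connection_string="postgresql://localhost/mydb")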
+""" + +from dataclasses import dataclass, field +from typing import Dict, List, Optional, Union + + +@dataclass +class MCPServerRequirements: + """Comprehensive requirements for an MCP server installation.""" + + environment_vars: List[str] = field( + default_factory=list + ) # ["GITHUB_TOKEN", "API_KEY"] + command_line_args: List[Dict[str, Union[str, bool]]] = field( + default_factory=list + ) # [{"name": "port", "prompt": "Port number", "default": "3000", "required": False}] + required_tools: List[str] = field( + default_factory=list + ) # ["node", "python", "npm", "npx"] + package_dependencies: List[str] = field( + default_factory=list + ) # ["jupyter", "@modelcontextprotocol/server-discord"] + system_requirements: List[str] = field( + default_factory=list + ) # ["Docker installed", "Git configured"] + + +@dataclass +class MCPServerTemplate: + """Template for a pre-configured MCP server.""" + + id: str + name: str + display_name: str + description: str + category: str + tags: List[str] + type: str # "stdio", "http", "sse" + config: Dict + author: str = "Community" + verified: bool = False + popular: bool = False + requires: Union[List[str], MCPServerRequirements] = field( + default_factory=list + ) # Backward compatible + example_usage: str = "" + + def get_requirements(self) -> MCPServerRequirements: + """Get requirements as MCPServerRequirements object.""" + if isinstance(self.requires, list): + # Backward compatibility - treat as required_tools + return MCPServerRequirements(required_tools=self.requires) + return self.requires + + def get_environment_vars(self) -> List[str]: + """Get list of required environment variables.""" + requirements = self.get_requirements() + env_vars = requirements.environment_vars.copy() + + # Also check config for env vars (existing logic) + if "env" in self.config: + for key, value in self.config["env"].items(): + if isinstance(value, str) and value.startswith("$"): + var_name = value[1:] + if var_name not in env_vars: + env_vars.append(var_name) + + return env_vars + + def get_command_line_args(self) -> List[Dict]: + """Get list of configurable command line arguments.""" + return self.get_requirements().command_line_args + + def get_required_tools(self) -> List[str]: + """Get list of required system tools.""" + return self.get_requirements().required_tools + + def get_package_dependencies(self) -> List[str]: + """Get list of package dependencies.""" + return self.get_requirements().package_dependencies + + def get_system_requirements(self) -> List[str]: + """Get list of system requirements.""" + return self.get_requirements().system_requirements + + def to_server_config(self, custom_name: Optional[str] = None, **cmd_args) -> Dict: + """Convert template to server configuration with optional overrides. + + Replaces placeholders in the config with actual values. + Placeholders are in the format ${ARG_NAME} in args array. 
+ """ + import copy + + config = { + "name": custom_name or self.name, + "type": self.type, + **copy.deepcopy(self.config), + } + + # Apply command line argument substitutions + if cmd_args and "args" in config: + new_args = [] + for arg in config["args"]: + # Check if this arg contains a placeholder like ${db_path} + if isinstance(arg, str) and "${" in arg: + # Replace all placeholders in this arg + new_arg = arg + for key, value in cmd_args.items(): + placeholder = f"${{{key}}}" + if placeholder in new_arg: + new_arg = new_arg.replace(placeholder, str(value)) + new_args.append(new_arg) + else: + new_args.append(arg) + config["args"] = new_args + + # Also handle environment variable placeholders + if "env" in config: + for env_key, env_value in config["env"].items(): + if isinstance(env_value, str) and "${" in env_value: + # Replace placeholders in env values + for key, value in cmd_args.items(): + placeholder = f"${{{key}}}" + if placeholder in env_value: + config["env"][env_key] = env_value.replace( + placeholder, str(value) + ) + + return config + + +# Pre-configured MCP Server Registry +MCP_SERVER_REGISTRY: List[MCPServerTemplate] = [ + MCPServerTemplate( + id="serena", + name="serena", + display_name="Serena", + description="Code Generation MCP Tooling", + tags=["Agentic", "Code", "SDK", "AI"], + category="Code", + type="stdio", + config={ + "command": "uvx", + "args": [ + "--from", + "git+https://github.com/oraios/serena", + "serena", + "start-mcp-server", + ], + }, + verified=True, + popular=True, + example_usage="Agentic AI for writing programs", + requires=["uvx"], + ), + # ========== File System & Storage ========== + MCPServerTemplate( + id="filesystem", + name="filesystem", + display_name="Filesystem Access", + description="Read and write files in specified directories", + category="Storage", + tags=["files", "io", "read", "write", "directory"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=["node", "npm"], + example_usage="Access and modify files in /tmp directory", + ), + MCPServerTemplate( + id="filesystem-home", + name="filesystem-home", + display_name="Home Directory Access", + description="Read and write files in user's home directory", + category="Storage", + tags=["files", "home", "user", "personal"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem", "~"], + "timeout": 30, + }, + verified=True, + requires=["node", "npm"], + ), + # Enhanced server with comprehensive requirements + MCPServerTemplate( + id="gdrive", + name="gdrive", + display_name="Google Drive", + description="Access and manage Google Drive files with OAuth2 authentication", + category="Storage", + tags=["google", "drive", "cloud", "storage", "sync", "oauth"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-gdrive"], + "env": { + "GOOGLE_CLIENT_ID": "$GOOGLE_CLIENT_ID", + "GOOGLE_CLIENT_SECRET": "$GOOGLE_CLIENT_SECRET", + }, + }, + requires=MCPServerRequirements( + environment_vars=["GOOGLE_CLIENT_ID", "GOOGLE_CLIENT_SECRET"], + command_line_args=[ + { + "name": "port", + "prompt": "OAuth redirect port", + "default": "3000", + "required": False, + }, + { + "name": "scope", + "prompt": "Google Drive API scope", + "default": "https://www.googleapis.com/auth/drive.readonly", + "required": False, + }, + ], + required_tools=["node", "npx", "npm"], + 
package_dependencies=["@modelcontextprotocol/server-gdrive"], + system_requirements=["Internet connection for OAuth"], + ), + verified=True, + popular=True, + example_usage="List files: 'Show me my Google Drive files'", + ), + # Regular server (backward compatible) + MCPServerTemplate( + id="filesystem-simple", + name="filesystem-simple", + display_name="Simple Filesystem", + description="Basic filesystem access", + category="Storage", + tags=["files", "basic"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["GOOGLE_CLIENT_ID", "GOOGLE_CLIENT_SECRET"], + command_line_args=[ + { + "name": "port", + "prompt": "OAuth redirect port", + "default": "3000", + "required": False, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-gdrive"], + ), + ), + # ========== Databases ========== + MCPServerTemplate( + id="postgres", + name="postgres", + display_name="PostgreSQL Database", + description="Connect to and query PostgreSQL databases", + category="Database", + tags=["database", "sql", "postgres", "postgresql", "query"], + type="stdio", + config={ + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-postgres", + "${connection_string}", + ], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["DATABASE_URL"], + command_line_args=[ + { + "name": "connection_string", + "prompt": "PostgreSQL connection string", + "default": "postgresql://localhost/mydb", + "required": True, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-postgres"], + system_requirements=["PostgreSQL server running"], + ), + example_usage="postgresql://user:password@localhost:5432/dbname", + ), + MCPServerTemplate( + id="sqlite", + name="sqlite", + display_name="SQLite Database", + description="Connect to and query SQLite databases", + category="Database", + tags=["database", "sql", "sqlite", "local", "embedded"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "mcp-sqlite", "${db_path}"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + command_line_args=[ + { + "name": "db_path", + "prompt": "Path to SQLite database file", + "default": "./database.db", + "required": True, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-sqlite"], + ), + ), + MCPServerTemplate( + id="mysql", + name="mysql", + display_name="MySQL Database", + description="Connect to and query MySQL databases", + category="Database", + tags=["database", "sql", "mysql", "mariadb", "query"], + type="stdio", + config={ + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-mysql", + "${connection_string}", + ], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["MYSQL_URL"], + command_line_args=[ + { + "name": "connection_string", + "prompt": "MySQL connection string", + "default": "mysql://localhost/mydb", + "required": True, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-mysql"], + system_requirements=["MySQL server running"], + ), + ), + MCPServerTemplate( + id="mongodb", + name="mongodb", + display_name="MongoDB Database", + description="Connect to and query MongoDB databases", + 
category="Database", + tags=["database", "nosql", "mongodb", "document", "query"], + type="stdio", + config={ + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-mongodb", + "${connection_string}", + ], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["MONGODB_URI"], + command_line_args=[ + { + "name": "connection_string", + "prompt": "MongoDB connection string", + "default": "mongodb://localhost:27017/mydb", + "required": True, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-mongodb"], + system_requirements=["MongoDB server running"], + ), + ), + # ========== Development Tools ========== + MCPServerTemplate( + id="git", + name="git", + display_name="Git Repository", + description="Manage Git repositories and perform version control operations", + category="Development", + tags=["git", "version-control", "repository", "commit", "branch"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-git"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx", "git"], + package_dependencies=["@modelcontextprotocol/server-git"], + system_requirements=["Git repository initialized"], + ), + ), + MCPServerTemplate( + id="github", + name="github", + display_name="GitHub API", + description="Access GitHub repositories, issues, PRs, and more", + category="Development", + tags=["github", "api", "repository", "issues", "pull-requests"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-github"], + "env": {"GITHUB_TOKEN": "$GITHUB_TOKEN"}, + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["GITHUB_TOKEN"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-github"], + system_requirements=["GitHub account with personal access token"], + ), + ), + MCPServerTemplate( + id="gitlab", + name="gitlab", + display_name="GitLab API", + description="Access GitLab repositories, issues, and merge requests", + category="Development", + tags=["gitlab", "api", "repository", "issues", "merge-requests"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-gitlab"], + "env": {"GITLAB_TOKEN": "$GITLAB_TOKEN"}, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["GITLAB_TOKEN"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-gitlab"], + system_requirements=["GitLab account with personal access token"], + ), + ), + # ========== Web & Browser ========== + MCPServerTemplate( + id="puppeteer", + name="puppeteer", + display_name="Puppeteer Browser", + description="Control headless Chrome for web scraping and automation", + category="Web", + tags=["browser", "web", "scraping", "automation", "chrome", "puppeteer"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-puppeteer"], + "timeout": 60, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + command_line_args=[ + { + "name": "headless", + "prompt": "Run in headless mode", + "default": "true", + "required": False, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-puppeteer"], + system_requirements=["Chrome/Chromium browser"], + ), + ), + MCPServerTemplate( + 
id="playwright", + name="playwright", + display_name="Playwright Browser", + description="Cross-browser automation for web testing and scraping", + category="Web", + tags=["browser", "web", "testing", "automation", "playwright"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-playwright"], + "timeout": 60, + }, + verified=True, + requires=MCPServerRequirements( + command_line_args=[ + { + "name": "browser", + "prompt": "Browser to use", + "default": "chromium", + "required": False, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-playwright"], + system_requirements=["Playwright browsers (will be installed)"], + ), + ), + MCPServerTemplate( + id="fetch", + name="fetch", + display_name="Web Fetch", + description="Fetch and process web pages and APIs", + category="Web", + tags=["web", "http", "api", "fetch", "request"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-fetch"], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-fetch"], + ), + ), + # ========== Communication ========== + MCPServerTemplate( + id="slack", + name="slack", + display_name="Slack Integration", + description="Send messages and interact with Slack workspaces", + category="Communication", + tags=["slack", "chat", "messaging", "notification"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-slack"], + "env": {"SLACK_TOKEN": "$SLACK_TOKEN"}, + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["SLACK_TOKEN"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-slack"], + system_requirements=["Slack app with bot token"], + ), + ), + MCPServerTemplate( + id="discord", + name="discord", + display_name="Discord Bot", + description="Interact with Discord servers and channels", + category="Communication", + tags=["discord", "chat", "bot", "messaging"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-discord"], + "env": {"DISCORD_TOKEN": "$DISCORD_TOKEN"}, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["DISCORD_TOKEN"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-discord"], + system_requirements=["Discord bot token"], + ), + ), + MCPServerTemplate( + id="email", + name="email", + display_name="Email (SMTP/IMAP)", + description="Send and receive emails", + category="Communication", + tags=["email", "smtp", "imap", "mail"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-email"], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["EMAIL_HOST", "EMAIL_PORT", "EMAIL_USER", "EMAIL_PASS"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-email"], + ), + ), + # ========== AI & Machine Learning ========== + MCPServerTemplate( + id="openai", + name="openai", + display_name="OpenAI API", + description="Access OpenAI models for text, image, and embedding generation", + category="AI", + tags=["ai", "openai", "gpt", "dalle", "embedding"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-openai"], + "env": {"OPENAI_API_KEY": 
"$OPENAI_API_KEY"}, + "timeout": 60, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["OPENAI_API_KEY"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-openai"], + ), + ), + MCPServerTemplate( + id="anthropic", + name="anthropic", + display_name="Anthropic Claude API", + description="Access Anthropic's Claude models", + category="AI", + tags=["ai", "anthropic", "claude", "llm"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-anthropic"], + "env": {"ANTHROPIC_API_KEY": "$ANTHROPIC_API_KEY"}, + "timeout": 60, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["ANTHROPIC_API_KEY"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-anthropic"], + ), + ), + # ========== Data Processing ========== + MCPServerTemplate( + id="pandas", + name="pandas", + display_name="Pandas Data Analysis", + description="Process and analyze data using Python pandas", + category="Data", + tags=["data", "pandas", "python", "analysis", "csv", "dataframe"], + type="stdio", + config={ + "command": "python", + "args": ["-m", "mcp_server_pandas"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + required_tools=["python", "pip"], + package_dependencies=["pandas", "mcp-server-pandas"], + ), + ), + MCPServerTemplate( + id="jupyter", + name="jupyter", + display_name="Jupyter Notebook", + description="Execute code in Jupyter notebooks", + category="Data", + tags=["jupyter", "notebook", "python", "data-science"], + type="stdio", + config={ + "command": "python", + "args": ["-m", "mcp_server_jupyter"], + "timeout": 60, + }, + verified=True, + requires=MCPServerRequirements( + required_tools=["python", "pip", "jupyter"], + package_dependencies=["jupyter", "mcp-server-jupyter"], + ), + ), + # ========== Cloud Services ========== + MCPServerTemplate( + id="aws-s3", + name="aws-s3", + display_name="AWS S3 Storage", + description="Manage AWS S3 buckets and objects", + category="Cloud", + tags=["aws", "s3", "storage", "cloud", "bucket"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-aws-s3"], + "env": { + "AWS_ACCESS_KEY_ID": "$AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY": "$AWS_SECRET_ACCESS_KEY", + }, + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"], + command_line_args=[ + { + "name": "region", + "prompt": "AWS region", + "default": "us-east-1", + "required": False, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-aws-s3"], + system_requirements=["AWS account with S3 access"], + ), + ), + MCPServerTemplate( + id="azure-storage", + name="azure-storage", + display_name="Azure Storage", + description="Manage Azure blob storage", + category="Cloud", + tags=["azure", "storage", "cloud", "blob"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-azure-storage"], + "env": { + "AZURE_STORAGE_CONNECTION_STRING": "$AZURE_STORAGE_CONNECTION_STRING" + }, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["AZURE_STORAGE_CONNECTION_STRING"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-azure-storage"], + system_requirements=["Azure storage account"], + ), + ), 
+ # ========== Security & Authentication ========== + MCPServerTemplate( + id="1password", + name="1password", + display_name="1Password Vault", + description="Access 1Password vaults securely", + category="Security", + tags=["security", "password", "vault", "1password", "secrets"], + type="stdio", + config={"command": "op", "args": ["mcp-server"], "timeout": 30}, + verified=True, + requires=MCPServerRequirements( + required_tools=["op"], + system_requirements=["1Password CLI installed and authenticated"], + ), + ), + MCPServerTemplate( + id="vault", + name="vault", + display_name="HashiCorp Vault", + description="Manage secrets in HashiCorp Vault", + category="Security", + tags=["security", "vault", "secrets", "hashicorp"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-vault"], + "env": {"VAULT_TOKEN": "$VAULT_TOKEN"}, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["VAULT_TOKEN", "VAULT_ADDR"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-vault"], + system_requirements=["HashiCorp Vault server accessible"], + ), + ), + # ========== Documentation & Knowledge ========== + MCPServerTemplate( + id="context7", + name="context7", + display_name="Context7 Documentation Search", + description="Search and retrieve documentation from multiple sources with AI-powered context understanding", + category="Documentation", + tags=["documentation", "search", "context", "ai", "knowledge", "docs", "cloud"], + type="http", + config={ + "url": "https://mcp.context7.com/mcp", + "headers": {"Authorization": "Bearer $CONTEXT7_API_KEY"}, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["CONTEXT7_API_KEY"], + ), + example_usage="Cloud-based service - no local setup required", + ), + MCPServerTemplate( + id="confluence", + name="confluence", + display_name="Confluence Wiki", + description="Access and manage Confluence pages", + category="Documentation", + tags=["wiki", "confluence", "documentation", "atlassian"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-confluence"], + "env": {"CONFLUENCE_TOKEN": "$CONFLUENCE_TOKEN"}, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["CONFLUENCE_TOKEN", "CONFLUENCE_BASE_URL"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-confluence"], + system_requirements=["Confluence API access"], + ), + ), + MCPServerTemplate( + id="notion", + name="notion", + display_name="Notion Workspace", + description="Access and manage Notion pages and databases", + category="Documentation", + tags=["notion", "wiki", "documentation", "database"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-notion"], + "env": {"NOTION_TOKEN": "$NOTION_TOKEN"}, + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + environment_vars=["NOTION_TOKEN"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-notion"], + system_requirements=["Notion integration API key"], + ), + ), + # ========== DevOps & Infrastructure ========== + MCPServerTemplate( + id="docker", + name="docker", + display_name="Docker Management", + description="Manage Docker containers and images", + category="DevOps", + tags=["docker", "container", "devops", "infrastructure"], + type="stdio", + config={ + 
"command": "npx", + "args": ["-y", "@modelcontextprotocol/server-docker"], + "timeout": 30, + }, + verified=True, + popular=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx", "docker"], + package_dependencies=["@modelcontextprotocol/server-docker"], + system_requirements=["Docker daemon running"], + ), + ), + MCPServerTemplate( + id="kubernetes", + name="kubernetes", + display_name="Kubernetes Cluster", + description="Manage Kubernetes resources", + category="DevOps", + tags=["kubernetes", "k8s", "container", "orchestration"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-kubernetes"], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx", "kubectl"], + package_dependencies=["@modelcontextprotocol/server-kubernetes"], + system_requirements=["Kubernetes cluster access (kubeconfig)"], + ), + ), + MCPServerTemplate( + id="terraform", + name="terraform", + display_name="Terraform Infrastructure", + description="Manage infrastructure as code with Terraform", + category="DevOps", + tags=["terraform", "iac", "infrastructure", "devops"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-terraform"], + "timeout": 60, + }, + verified=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx", "terraform"], + package_dependencies=["@modelcontextprotocol/server-terraform"], + system_requirements=["Terraform configuration files"], + ), + ), + # ========== Monitoring & Observability ========== + MCPServerTemplate( + id="prometheus", + name="prometheus", + display_name="Prometheus Metrics", + description="Query Prometheus metrics", + category="Monitoring", + tags=["monitoring", "metrics", "prometheus", "observability"], + type="stdio", + config={ + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-prometheus", + "${prometheus_url}", + ], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + command_line_args=[ + { + "name": "prometheus_url", + "prompt": "Prometheus server URL", + "default": "http://localhost:9090", + "required": True, + } + ], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-prometheus"], + system_requirements=["Prometheus server accessible"], + ), + ), + MCPServerTemplate( + id="grafana", + name="grafana", + display_name="Grafana Dashboards", + description="Access Grafana dashboards and alerts", + category="Monitoring", + tags=["monitoring", "dashboard", "grafana", "visualization"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-grafana"], + "env": {"GRAFANA_TOKEN": "$GRAFANA_TOKEN"}, + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + environment_vars=["GRAFANA_TOKEN", "GRAFANA_URL"], + required_tools=["node", "npm", "npx"], + package_dependencies=["@modelcontextprotocol/server-grafana"], + system_requirements=["Grafana server with API access"], + ), + ), + # ========== Package Management ========== + MCPServerTemplate( + id="npm", + name="npm", + display_name="NPM Package Manager", + description="Search and manage NPM packages", + category="Package Management", + tags=["npm", "node", "package", "javascript"], + type="stdio", + config={ + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-npm"], + "timeout": 30, + }, + verified=True, + requires=MCPServerRequirements( + required_tools=["node", "npm", "npx"], + 
package_dependencies=["@modelcontextprotocol/server-npm"], + ), + ), + MCPServerTemplate( + id="pypi", + name="pypi", + display_name="PyPI Package Manager", + description="Search and manage Python packages", + category="Package Management", + tags=["python", "pip", "pypi", "package"], + type="stdio", + config={"command": "python", "args": ["-m", "mcp_server_pypi"], "timeout": 30}, + verified=True, + requires=MCPServerRequirements( + required_tools=["python", "pip"], package_dependencies=["mcp-server-pypi"] + ), + ), +] + + +class MCPServerCatalog: + """Catalog for searching and managing pre-configured MCP servers.""" + + def __init__(self): + self.servers = MCP_SERVER_REGISTRY + self._build_index() + + def _build_index(self): + """Build search index for fast lookups.""" + self.by_id = {s.id: s for s in self.servers} + self.by_category = {} + for server in self.servers: + if server.category not in self.by_category: + self.by_category[server.category] = [] + self.by_category[server.category].append(server) + + def search(self, query: str) -> List[MCPServerTemplate]: + """ + Search for servers by name, description, or tags. + + Args: + query: Search query string + + Returns: + List of matching server templates + """ + query_lower = query.lower() + results = [] + + for server in self.servers: + # Check name + if query_lower in server.name.lower(): + results.append(server) + continue + + # Check display name + if query_lower in server.display_name.lower(): + results.append(server) + continue + + # Check description + if query_lower in server.description.lower(): + results.append(server) + continue + + # Check tags + for tag in server.tags: + if query_lower in tag.lower(): + results.append(server) + break + + # Check category + if query_lower in server.category.lower() and server not in results: + results.append(server) + + # Sort by relevance (name matches first, then popular) + results.sort( + key=lambda s: ( + not s.name.lower().startswith(query_lower), + not s.popular, + s.name, + ) + ) + + return results + + def get_by_id(self, server_id: str) -> Optional[MCPServerTemplate]: + """Get server template by ID.""" + return self.by_id.get(server_id) + + def get_by_category(self, category: str) -> List[MCPServerTemplate]: + """Get all servers in a category.""" + return self.by_category.get(category, []) + + def list_categories(self) -> List[str]: + """List all available categories.""" + return sorted(self.by_category.keys()) + + def get_popular(self, limit: int = 10) -> List[MCPServerTemplate]: + """Get popular servers.""" + popular = [s for s in self.servers if s.popular] + return popular[:limit] + + def get_verified(self) -> List[MCPServerTemplate]: + """Get all verified servers.""" + return [s for s in self.servers if s.verified] + + +# Global catalog instance +catalog = MCPServerCatalog() diff --git a/code_puppy/mcp_/status_tracker.py b/code_puppy/mcp_/status_tracker.py new file mode 100644 index 00000000..0feb0db7 --- /dev/null +++ b/code_puppy/mcp_/status_tracker.py @@ -0,0 +1,355 @@ +""" +Server Status Tracker for monitoring MCP server runtime status. + +This module provides the ServerStatusTracker class that tracks the runtime +status of MCP servers including state, metrics, and events. 
+""" + +import logging +import threading +from collections import defaultdict, deque +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Any, Dict, List, Optional + +from .managed_server import ServerState + +# Configure logging +logger = logging.getLogger(__name__) + + +@dataclass +class Event: + """Data class representing a server event.""" + + timestamp: datetime + event_type: str # "started", "stopped", "error", "health_check", etc. + details: Dict + server_id: str + + +class ServerStatusTracker: + """ + Tracks the runtime status of MCP servers including state, metrics, and events. + + This class provides in-memory storage for server states, metadata, and events + with thread-safe operations using locks. Events are stored using collections.deque + for automatic size limiting. + + Example usage: + tracker = ServerStatusTracker() + tracker.set_status("server1", ServerState.RUNNING) + tracker.record_event("server1", "started", {"message": "Server started successfully"}) + events = tracker.get_events("server1", limit=10) + """ + + def __init__(self): + """Initialize the status tracker with thread-safe data structures.""" + # Thread safety lock + self._lock = threading.RLock() + + # Server states (server_id -> ServerState) + self._server_states: Dict[str, ServerState] = {} + + # Server metadata (server_id -> key -> value) + self._server_metadata: Dict[str, Dict[str, Any]] = defaultdict(dict) + + # Server events (server_id -> deque of events) + # Using deque with maxlen for automatic size limiting + self._server_events: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000)) + + # Server timing information + self._start_times: Dict[str, datetime] = {} + self._stop_times: Dict[str, datetime] = {} + + logger.info("ServerStatusTracker initialized") + + def set_status(self, server_id: str, state: ServerState) -> None: + """ + Set the current state of a server. + + Args: + server_id: Unique identifier for the server + state: New server state + """ + with self._lock: + old_state = self._server_states.get(server_id) + self._server_states[server_id] = state + + # Record state change event + self.record_event( + server_id, + "state_change", + { + "old_state": old_state.value if old_state else None, + "new_state": state.value, + "message": f"State changed from {old_state.value if old_state else 'unknown'} to {state.value}", + }, + ) + + logger.debug(f"Server {server_id} state changed: {old_state} -> {state}") + + def get_status(self, server_id: str) -> ServerState: + """ + Get the current state of a server. + + Args: + server_id: Unique identifier for the server + + Returns: + Current server state, defaults to STOPPED if not found + """ + with self._lock: + return self._server_states.get(server_id, ServerState.STOPPED) + + def set_metadata(self, server_id: str, key: str, value: Any) -> None: + """ + Set metadata value for a server. 
+ + Args: + server_id: Unique identifier for the server + key: Metadata key + value: Metadata value (can be any type) + """ + with self._lock: + if server_id not in self._server_metadata: + self._server_metadata[server_id] = {} + + old_value = self._server_metadata[server_id].get(key) + self._server_metadata[server_id][key] = value + + # Record metadata change event + self.record_event( + server_id, + "metadata_update", + { + "key": key, + "old_value": old_value, + "new_value": value, + "message": f"Metadata '{key}' updated", + }, + ) + + logger.debug(f"Server {server_id} metadata updated: {key} = {value}") + + def get_metadata(self, server_id: str, key: str) -> Any: + """ + Get metadata value for a server. + + Args: + server_id: Unique identifier for the server + key: Metadata key + + Returns: + Metadata value or None if not found + """ + with self._lock: + return self._server_metadata.get(server_id, {}).get(key) + + def record_event(self, server_id: str, event_type: str, details: Dict) -> None: + """ + Record an event for a server. + + Args: + server_id: Unique identifier for the server + event_type: Type of event (e.g., "started", "stopped", "error", "health_check") + details: Dictionary containing event details + """ + with self._lock: + event = Event( + timestamp=datetime.now(), + event_type=event_type, + details=details.copy() + if details + else {}, # Copy to prevent modification + server_id=server_id, + ) + + # Add to deque (automatically handles size limiting) + self._server_events[server_id].append(event) + + logger.debug(f"Event recorded for server {server_id}: {event_type}") + + def get_events(self, server_id: str, limit: int = 100) -> List[Event]: + """ + Get recent events for a server. + + Args: + server_id: Unique identifier for the server + limit: Maximum number of events to return (default: 100) + + Returns: + List of events ordered by timestamp (most recent first) + """ + with self._lock: + events = list(self._server_events.get(server_id, deque())) + + # Return most recent events first, limited by count + events.reverse() # Most recent first + return events[:limit] + + def clear_events(self, server_id: str) -> None: + """ + Clear all events for a server. + + Args: + server_id: Unique identifier for the server + """ + with self._lock: + if server_id in self._server_events: + self._server_events[server_id].clear() + logger.info(f"Cleared all events for server: {server_id}") + + def get_uptime(self, server_id: str) -> Optional[timedelta]: + """ + Calculate uptime for a server based on start/stop times. + + Args: + server_id: Unique identifier for the server + + Returns: + Server uptime as timedelta, or None if server never started + """ + with self._lock: + start_time = self._start_times.get(server_id) + if start_time is None: + return None + + # If server is currently running, calculate from start time to now + current_state = self.get_status(server_id) + if current_state == ServerState.RUNNING: + return datetime.now() - start_time + + # If server is stopped, calculate from start to stop time + stop_time = self._stop_times.get(server_id) + if stop_time is not None and stop_time > start_time: + return stop_time - start_time + + # If we have start time but no valid stop time, assume currently running + return datetime.now() - start_time + + def record_start_time(self, server_id: str) -> None: + """ + Record the start time for a server. 
+ + Args: + server_id: Unique identifier for the server + """ + with self._lock: + start_time = datetime.now() + self._start_times[server_id] = start_time + + # Record start event + self.record_event( + server_id, + "started", + {"start_time": start_time.isoformat(), "message": "Server started"}, + ) + + logger.info(f"Recorded start time for server: {server_id}") + + def record_stop_time(self, server_id: str) -> None: + """ + Record the stop time for a server. + + Args: + server_id: Unique identifier for the server + """ + with self._lock: + stop_time = datetime.now() + self._stop_times[server_id] = stop_time + + # Calculate final uptime + start_time = self._start_times.get(server_id) + uptime = None + if start_time: + uptime = stop_time - start_time + + # Record stop event + self.record_event( + server_id, + "stopped", + { + "stop_time": stop_time.isoformat(), + "uptime_seconds": uptime.total_seconds() if uptime else None, + "message": "Server stopped", + }, + ) + + logger.info(f"Recorded stop time for server: {server_id}") + + def get_all_server_ids(self) -> List[str]: + """ + Get all server IDs that have been tracked. + + Returns: + List of all server IDs + """ + with self._lock: + # Combine all sources of server IDs + all_ids = set() + all_ids.update(self._server_states.keys()) + all_ids.update(self._server_metadata.keys()) + all_ids.update(self._server_events.keys()) + all_ids.update(self._start_times.keys()) + all_ids.update(self._stop_times.keys()) + + return sorted(list(all_ids)) + + def get_server_summary(self, server_id: str) -> Dict[str, Any]: + """ + Get comprehensive summary of server status. + + Args: + server_id: Unique identifier for the server + + Returns: + Dictionary containing current state, metadata, recent events, and uptime + """ + with self._lock: + return { + "server_id": server_id, + "state": self.get_status(server_id).value, + "metadata": self._server_metadata.get(server_id, {}).copy(), + "recent_events_count": len(self._server_events.get(server_id, deque())), + "uptime": self.get_uptime(server_id), + "start_time": self._start_times.get(server_id), + "stop_time": self._stop_times.get(server_id), + "last_event_time": ( + list(self._server_events.get(server_id, deque()))[-1].timestamp + if server_id in self._server_events + and len(self._server_events[server_id]) > 0 + else None + ), + } + + def cleanup_old_data(self, days_to_keep: int = 7) -> None: + """ + Clean up old data to prevent memory bloat. + + Args: + days_to_keep: Number of days of data to keep (default: 7) + """ + cutoff_time = datetime.now() - timedelta(days=days_to_keep) + + with self._lock: + cleaned_servers = [] + + for server_id in list(self._server_events.keys()): + events = self._server_events[server_id] + if events: + # Filter out old events + original_count = len(events) + # Convert to list, filter, then create new deque + filtered_events = [ + event for event in events if event.timestamp >= cutoff_time + ] + + # Replace the deque with filtered events + self._server_events[server_id] = deque(filtered_events, maxlen=1000) + + if len(filtered_events) < original_count: + cleaned_servers.append(server_id) + + if cleaned_servers: + logger.info(f"Cleaned old events for {len(cleaned_servers)} servers") diff --git a/code_puppy/mcp_/system_tools.py b/code_puppy/mcp_/system_tools.py new file mode 100644 index 00000000..7c9ffcda --- /dev/null +++ b/code_puppy/mcp_/system_tools.py @@ -0,0 +1,209 @@ +""" +System tool detection and validation for MCP server requirements. 
+""" + +import shutil +import subprocess +from dataclasses import dataclass +from typing import Dict, List, Optional + + +@dataclass +class ToolInfo: + """Information about a detected system tool.""" + + name: str + available: bool + version: Optional[str] = None + path: Optional[str] = None + error: Optional[str] = None + + +class SystemToolDetector: + """Detect and validate system tools required by MCP servers.""" + + # Tool version commands + VERSION_COMMANDS = { + "node": ["node", "--version"], + "npm": ["npm", "--version"], + "npx": ["npx", "--version"], + "python": ["python", "--version"], + "python3": ["python3", "--version"], + "pip": ["pip", "--version"], + "pip3": ["pip3", "--version"], + "git": ["git", "--version"], + "docker": ["docker", "--version"], + "java": ["java", "-version"], + "go": ["go", "version"], + "rust": ["rustc", "--version"], + "cargo": ["cargo", "--version"], + "julia": ["julia", "--version"], + "R": ["R", "--version"], + "php": ["php", "--version"], + "ruby": ["ruby", "--version"], + "perl": ["perl", "--version"], + "swift": ["swift", "--version"], + "dotnet": ["dotnet", "--version"], + "jupyter": ["jupyter", "--version"], + "code": ["code", "--version"], # VS Code + "vim": ["vim", "--version"], + "emacs": ["emacs", "--version"], + } + + @classmethod + def detect_tool(cls, tool_name: str) -> ToolInfo: + """Detect if a tool is available and get its version.""" + # First check if tool is in PATH + tool_path = shutil.which(tool_name) + + if not tool_path: + return ToolInfo( + name=tool_name, available=False, error=f"{tool_name} not found in PATH" + ) + + # Try to get version + version_cmd = cls.VERSION_COMMANDS.get(tool_name) + version = None + error = None + + if version_cmd: + try: + # Run version command + result = subprocess.run( + version_cmd, capture_output=True, text=True, timeout=10 + ) + + if result.returncode == 0: + # Parse version from output + output = result.stdout.strip() or result.stderr.strip() + version = cls._parse_version(tool_name, output) + else: + error = f"Version check failed: {result.stderr.strip()}" + + except subprocess.TimeoutExpired: + error = "Version check timed out" + except Exception as e: + error = f"Version check error: {str(e)}" + + return ToolInfo( + name=tool_name, available=True, version=version, path=tool_path, error=error + ) + + @classmethod + def detect_tools(cls, tool_names: List[str]) -> Dict[str, ToolInfo]: + """Detect multiple tools.""" + return {name: cls.detect_tool(name) for name in tool_names} + + @classmethod + def _parse_version(cls, tool_name: str, output: str) -> Optional[str]: + """Parse version string from command output.""" + if not output: + return None + + # Common version patterns + import re + + # Try to find version pattern like "v1.2.3" or "1.2.3" + version_patterns = [ + r"v?(\d+\.\d+\.\d+(?:\.\d+)?)", # Standard semver + r"(\d+\.\d+\.\d+)", # Simple version + r"version\s+v?(\d+\.\d+\.\d+)", # "version 1.2.3" + r"v?(\d+\.\d+)", # Major.minor only + ] + + for pattern in version_patterns: + match = re.search(pattern, output, re.IGNORECASE) + if match: + return match.group(1) + + # If no pattern matches, return first line (common for many tools) + first_line = output.split("\n")[0].strip() + if len(first_line) < 100: # Reasonable length for a version string + return first_line + + return None + + @classmethod + def check_package_dependencies(cls, packages: List[str]) -> Dict[str, bool]: + """Check if package dependencies are available.""" + results = {} + + for package in packages: + available = 
False + + # Try different package managers/methods + if package.startswith("@") or "/" in package: + # Likely npm package + available = cls._check_npm_package(package) + elif package in ["jupyter", "pandas", "numpy", "matplotlib"]: + # Python packages + available = cls._check_python_package(package) + else: + # Try both npm and python + available = cls._check_npm_package( + package + ) or cls._check_python_package(package) + + results[package] = available + + return results + + @classmethod + def _check_npm_package(cls, package: str) -> bool: + """Check if an npm package is available.""" + try: + result = subprocess.run( + ["npm", "list", "-g", package], + capture_output=True, + text=True, + timeout=10, + ) + return result.returncode == 0 + except Exception: + return False + + @classmethod + def _check_python_package(cls, package: str) -> bool: + """Check if a Python package is available.""" + try: + import importlib + + importlib.import_module(package) + return True + except ImportError: + return False + + @classmethod + def get_installation_suggestions(cls, tool_name: str) -> List[str]: + """Get installation suggestions for a missing tool.""" + suggestions = { + "node": [ + "Install Node.js from https://nodejs.org", + "Or use package manager: brew install node (macOS) / sudo apt install nodejs (Ubuntu)", + ], + "npm": ["Usually comes with Node.js - install Node.js first"], + "npx": ["Usually comes with npm 5.2+ - update npm: npm install -g npm"], + "python": [ + "Install Python from https://python.org", + "Or use package manager: brew install python (macOS) / sudo apt install python3 (Ubuntu)", + ], + "python3": ["Same as python - install Python 3.x"], + "pip": ["Usually comes with Python - try: python -m ensurepip"], + "pip3": ["Usually comes with Python 3 - try: python3 -m ensurepip"], + "git": [ + "Install Git from https://git-scm.com", + "Or use package manager: brew install git (macOS) / sudo apt install git (Ubuntu)", + ], + "docker": ["Install Docker from https://docker.com"], + "java": [ + "Install OpenJDK from https://openjdk.java.net", + "Or use package manager: brew install openjdk (macOS) / sudo apt install default-jdk (Ubuntu)", + ], + "jupyter": ["Install with pip: pip install jupyter"], + } + + return suggestions.get(tool_name, [f"Please install {tool_name} manually"]) + + +# Global detector instance +detector = SystemToolDetector() diff --git a/code_puppy/messaging/__init__.py b/code_puppy/messaging/__init__.py new file mode 100644 index 00000000..52f7ae61 --- /dev/null +++ b/code_puppy/messaging/__init__.py @@ -0,0 +1,50 @@ +from .message_queue import ( + MessageQueue, + MessageType, + UIMessage, + emit_agent_reasoning, + emit_agent_response, + emit_command_output, + emit_divider, + emit_error, + emit_info, + emit_message, + emit_planned_next_steps, + emit_prompt, + emit_success, + emit_system_message, + emit_tool_output, + emit_warning, + get_buffered_startup_messages, + get_global_queue, + provide_prompt_response, +) +from .queue_console import QueueConsole, get_queue_console +from .renderers import InteractiveRenderer, SynchronousInteractiveRenderer, TUIRenderer + +__all__ = [ + "MessageQueue", + "MessageType", + "UIMessage", + "get_global_queue", + "emit_message", + "emit_info", + "emit_success", + "emit_warning", + "emit_divider", + "emit_error", + "emit_tool_output", + "emit_command_output", + "emit_agent_reasoning", + "emit_planned_next_steps", + "emit_agent_response", + "emit_system_message", + "emit_prompt", + "provide_prompt_response", + 
"get_buffered_startup_messages", + "InteractiveRenderer", + "TUIRenderer", + "SynchronousInteractiveRenderer", + "QueueConsole", + "get_queue_console", +] diff --git a/code_puppy/messaging/message_queue.py b/code_puppy/messaging/message_queue.py new file mode 100644 index 00000000..c2b7e1ff --- /dev/null +++ b/code_puppy/messaging/message_queue.py @@ -0,0 +1,381 @@ +""" +Message queue system for decoupling Rich console output from renderers. + +This allows both TUI and interactive modes to consume the same messages +but render them differently based on their capabilities. +""" + +import asyncio +import queue +import threading +from dataclasses import dataclass +from datetime import datetime, timezone +from enum import Enum +from typing import Any, Dict, Optional, Union + +from rich.text import Text + + +class MessageType(Enum): + """Types of messages that can be sent through the queue.""" + + # Basic content types + INFO = "info" + SUCCESS = "success" + WARNING = "warning" + ERROR = "error" + DIVIDER = "divider" + + # Tool-specific types + TOOL_OUTPUT = "tool_output" + COMMAND_OUTPUT = "command_output" + FILE_OPERATION = "file_operation" + + # Agent-specific types + AGENT_REASONING = "agent_reasoning" + PLANNED_NEXT_STEPS = "planned_next_steps" + AGENT_RESPONSE = "agent_response" + AGENT_STATUS = "agent_status" + + # Human interaction types + HUMAN_INPUT_REQUEST = "human_input_request" + + # System types + SYSTEM = "system" + DEBUG = "debug" + + +@dataclass +class UIMessage: + """A message to be displayed in the UI.""" + + type: MessageType + content: Union[str, Text, Any] # Can be Rich Text, Table, Markdown, etc. + timestamp: datetime = None + metadata: Dict[str, Any] = None + + def __post_init__(self): + if self.timestamp is None: + self.timestamp = datetime.now(timezone.utc) + if self.metadata is None: + self.metadata = {} + + +class MessageQueue: + """Thread-safe message queue for UI messages.""" + + def __init__(self, maxsize: int = 1000): + self._queue = queue.Queue(maxsize=maxsize) + self._async_queue = None # Will be created when needed + self._async_queue_maxsize = maxsize + self._listeners = [] + self._running = False + self._thread = None + self._startup_buffer = [] # Buffer messages before any renderer starts + self._has_active_renderer = False + self._event_loop = None # Store reference to the event loop + self._prompt_responses = {} # Store responses to human input requests + self._prompt_id_counter = 0 # Counter for unique prompt IDs + + def start(self): + """Start the queue processing.""" + if self._running: + return + + self._running = True + self._thread = threading.Thread(target=self._process_messages, daemon=True) + self._thread.start() + + def get_buffered_messages(self): + """Get all currently buffered messages without waiting.""" + # First get any startup buffered messages + messages = list(self._startup_buffer) + + # Then get any queued messages + while True: + try: + message = self._queue.get_nowait() + messages.append(message) + except queue.Empty: + break + return messages + + def clear_startup_buffer(self): + """Clear the startup buffer after processing.""" + self._startup_buffer.clear() + + def stop(self): + """Stop the queue processing.""" + self._running = False + if self._thread and self._thread.is_alive(): + self._thread.join(timeout=1.0) + + def emit(self, message: UIMessage): + """Emit a message to the queue.""" + # If no renderer is active yet, buffer the message for startup + if not self._has_active_renderer: + self._startup_buffer.append(message) + 
return + + try: + self._queue.put_nowait(message) + except queue.Full: + # Drop oldest message to make room + try: + self._queue.get_nowait() + self._queue.put_nowait(message) + except queue.Empty: + pass + + def emit_simple(self, message_type: MessageType, content: Any, **metadata): + """Emit a simple message with just type and content.""" + msg = UIMessage(type=message_type, content=content, metadata=metadata) + self.emit(msg) + + def get_nowait(self) -> Optional[UIMessage]: + """Get a message without blocking.""" + try: + return self._queue.get_nowait() + except queue.Empty: + return None + + async def get_async(self) -> UIMessage: + """Get a message asynchronously.""" + # Lazy initialization of async queue and store event loop reference + if self._async_queue is None: + self._async_queue = asyncio.Queue(maxsize=self._async_queue_maxsize) + self._event_loop = asyncio.get_running_loop() + return await self._async_queue.get() + + def _process_messages(self): + """Process messages from sync to async queue.""" + while self._running: + try: + message = self._queue.get(timeout=0.1) + + # Try to put in async queue if we have an event loop reference + if self._event_loop is not None and self._async_queue is not None: + # Use thread-safe call to put message in async queue + # Create a bound method to avoid closure issues + try: + self._event_loop.call_soon_threadsafe( + self._async_queue.put_nowait, message + ) + except Exception: + # Handle any errors with the async queue operation + pass + + # Notify listeners immediately for sync processing + for listener in self._listeners: + try: + listener(message) + except Exception: + pass # Don't let listener errors break processing + + except queue.Empty: + continue + + def add_listener(self, callback): + """Add a listener for messages (for direct sync consumption).""" + self._listeners.append(callback) + # Mark that we have an active renderer + self._has_active_renderer = True + + def remove_listener(self, callback): + """Remove a listener.""" + if callback in self._listeners: + self._listeners.remove(callback) + # If no more listeners, mark as no active renderer + if not self._listeners: + self._has_active_renderer = False + + def mark_renderer_active(self): + """Mark that a renderer is now active and consuming messages.""" + self._has_active_renderer = True + + def mark_renderer_inactive(self): + """Mark that no renderer is currently active.""" + self._has_active_renderer = False + + def create_prompt_request(self, prompt_text: str) -> str: + """Create a human input request and return its unique ID.""" + self._prompt_id_counter += 1 + prompt_id = f"prompt_{self._prompt_id_counter}" + + # Emit the human input request message + message = UIMessage( + type=MessageType.HUMAN_INPUT_REQUEST, + content=prompt_text, + metadata={"prompt_id": prompt_id}, + ) + self.emit(message) + + return prompt_id + + def wait_for_prompt_response(self, prompt_id: str, timeout: float = None) -> str: + """Wait for a response to a human input request.""" + import time + + start_time = time.time() + + # Check if we're in TUI mode - if so, try to yield control to the event loop + from code_puppy.tui_state import is_tui_mode + + sleep_interval = 0.05 if is_tui_mode() else 0.1 + + # Debug logging for TUI mode + if is_tui_mode(): + print(f"[DEBUG] Waiting for prompt response: {prompt_id}") + + while True: + if prompt_id in self._prompt_responses: + response = self._prompt_responses.pop(prompt_id) + if is_tui_mode(): + print(f"[DEBUG] Got response for {prompt_id}: 
{response[:20]}...") + return response + + if timeout and (time.time() - start_time) > timeout: + raise TimeoutError( + f"No response received for prompt {prompt_id} within {timeout} seconds" + ) + + time.sleep(sleep_interval) + + def provide_prompt_response(self, prompt_id: str, response: str): + """Provide a response to a human input request.""" + from code_puppy.tui_state import is_tui_mode + + if is_tui_mode(): + print(f"[DEBUG] Providing response for {prompt_id}: {response[:20]}...") + self._prompt_responses[prompt_id] = response + + +# Global message queue instance +_global_queue: Optional[MessageQueue] = None +_queue_lock = threading.Lock() + + +def get_global_queue() -> MessageQueue: + """Get or create the global message queue.""" + global _global_queue + + with _queue_lock: + if _global_queue is None: + _global_queue = MessageQueue() + _global_queue.start() + + return _global_queue + + +def get_buffered_startup_messages(): + """Get any messages that were buffered before renderers started.""" + queue = get_global_queue() + # Only return startup buffer messages, don't clear them yet + messages = list(queue._startup_buffer) + return messages + + +def emit_message(message_type: MessageType, content: Any, **metadata): + """Convenience function to emit a message to the global queue.""" + queue = get_global_queue() + queue.emit_simple(message_type, content, **metadata) + + +def emit_info(content: Any, **metadata): + """Emit an info message.""" + emit_message(MessageType.INFO, content, **metadata) + + +def emit_success(content: Any, **metadata): + """Emit a success message.""" + emit_message(MessageType.SUCCESS, content, **metadata) + + +def emit_warning(content: Any, **metadata): + """Emit a warning message.""" + emit_message(MessageType.WARNING, content, **metadata) + + +def emit_error(content: Any, **metadata): + """Emit an error message.""" + emit_message(MessageType.ERROR, content, **metadata) + + +def emit_tool_output(content: Any, tool_name: str = None, **metadata): + """Emit tool output.""" + if tool_name: + metadata["tool_name"] = tool_name + emit_message(MessageType.TOOL_OUTPUT, content, **metadata) + + +def emit_command_output(content: Any, command: str = None, **metadata): + """Emit command output.""" + if command: + metadata["command"] = command + emit_message(MessageType.COMMAND_OUTPUT, content, **metadata) + + +def emit_agent_reasoning(content: Any, **metadata): + """Emit agent reasoning.""" + emit_message(MessageType.AGENT_REASONING, content, **metadata) + + +def emit_planned_next_steps(content: Any, **metadata): + """Emit planned_next_steps""" + emit_message(MessageType.PLANNED_NEXT_STEPS, content, **metadata) + + +def emit_agent_response(content: Any, **metadata): + """Emit agent_response""" + emit_message(MessageType.AGENT_RESPONSE, content, **metadata) + + +def emit_system_message(content: Any, **metadata): + """Emit a system message.""" + emit_message(MessageType.SYSTEM, content, **metadata) + + +def emit_divider(content: str = "[dim]" + "─" * 100 + "\n" + "[/dim]", **metadata): + """Emit a divider line""" + from code_puppy.tui_state import is_tui_mode + + if not is_tui_mode(): + emit_message(MessageType.DIVIDER, content, **metadata) + else: + pass + + +def emit_prompt(prompt_text: str, timeout: float = None) -> str: + """Emit a human input request and wait for response.""" + from code_puppy.tui_state import is_tui_mode + + # In interactive mode, use direct input instead of the queue system + if not is_tui_mode(): + # Emit the prompt as a message for display + from 
code_puppy.messaging import emit_info + + emit_info(f"[yellow]{prompt_text}[/yellow]") + + # Get input directly + try: + # Try to use rich console for better formatting + from rich.console import Console + + console = Console() + response = console.input("[cyan]>>> [/cyan]") + return response + except Exception: + # Fallback to basic input + response = input(">>> ") + return response + + # In TUI mode, use the queue system + queue = get_global_queue() + prompt_id = queue.create_prompt_request(prompt_text) + return queue.wait_for_prompt_response(prompt_id, timeout) + + +def provide_prompt_response(prompt_id: str, response: str): + """Provide a response to a human input request.""" + queue = get_global_queue() + queue.provide_prompt_response(prompt_id, response) diff --git a/code_puppy/messaging/queue_console.py b/code_puppy/messaging/queue_console.py new file mode 100644 index 00000000..631d3540 --- /dev/null +++ b/code_puppy/messaging/queue_console.py @@ -0,0 +1,294 @@ +""" +Queue-based console that mimics Rich Console but sends messages to a queue. + +This allows tools to use the same Rich console interface while having +their output captured and routed through our message queue system. +""" + +import traceback +from typing import Any, Optional + +from rich.console import Console +from rich.markdown import Markdown +from rich.table import Table +from rich.text import Text + +from .message_queue import MessageQueue, MessageType, get_global_queue + + +class QueueConsole: + """ + Console-like interface that sends messages to a queue instead of stdout. + + This is designed to be a drop-in replacement for Rich Console that + routes messages through our queue system. + """ + + def __init__( + self, + queue: Optional[MessageQueue] = None, + fallback_console: Optional[Console] = None, + ): + self.queue = queue or get_global_queue() + self.fallback_console = fallback_console or Console() + + def print( + self, + *values: Any, + sep: str = " ", + end: str = "\n", + style: Optional[str] = None, + highlight: bool = True, + **kwargs, + ): + """Print values to the message queue.""" + # Handle Rich objects properly + if len(values) == 1 and hasattr(values[0], "__rich_console__"): + # Single Rich object - pass it through directly + content = values[0] + message_type = self._infer_message_type_from_rich_object(content, style) + else: + # Convert to string, but handle Rich objects properly + processed_values = [] + for v in values: + if hasattr(v, "__rich_console__"): + # For Rich objects, try to extract their text content + from io import StringIO + + from rich.console import Console + + string_io = StringIO() + # Use markup=True to properly process rich styling + # Use a reasonable width to prevent wrapping issues + temp_console = Console( + file=string_io, width=80, legacy_windows=False, markup=True + ) + temp_console.print(v) + processed_values.append(string_io.getvalue().rstrip("\n")) + else: + processed_values.append(str(v)) + + content = sep.join(processed_values) + end + message_type = self._infer_message_type(content, style) + + # Create Rich Text object if style is provided and content is string + if style and isinstance(content, str): + content = Text(content, style=style) + + # Emit to queue + self.queue.emit_simple( + message_type, content, style=style, highlight=highlight, **kwargs + ) + + def print_exception( + self, + *, + width: Optional[int] = None, + extra_lines: int = 3, + theme: Optional[str] = None, + word_wrap: bool = False, + show_locals: bool = False, + indent_guides: bool = True, 
+ suppress: tuple = (), + max_frames: int = 100, + ): + """Print exception information to the queue.""" + # Get the exception traceback + exc_text = traceback.format_exc() + + # Emit as error message + self.queue.emit_simple( + MessageType.ERROR, + f"Exception:\n{exc_text}", + exception=True, + show_locals=show_locals, + ) + + def log( + self, + *values: Any, + sep: str = " ", + end: str = "\n", + style: Optional[str] = None, + justify: Optional[str] = None, + emoji: Optional[bool] = None, + markup: Optional[bool] = None, + highlight: Optional[bool] = None, + log_locals: bool = False, + ): + """Log a message (similar to print but with logging semantics).""" + content = sep.join(str(v) for v in values) + end + + # Log messages are typically informational + message_type = MessageType.INFO + if style: + message_type = self._infer_message_type(content, style) + + if style and isinstance(content, str): + content = Text(content, style=style) + + self.queue.emit_simple( + message_type, content, log=True, style=style, log_locals=log_locals + ) + + def _infer_message_type_from_rich_object( + self, content: Any, style: Optional[str] = None + ) -> MessageType: + """Infer message type from Rich object type and style.""" + if style: + style_lower = style.lower() + if "red" in style_lower or "error" in style_lower: + return MessageType.ERROR + elif "yellow" in style_lower or "warning" in style_lower: + return MessageType.WARNING + elif "green" in style_lower or "success" in style_lower: + return MessageType.SUCCESS + elif "blue" in style_lower: + return MessageType.INFO + elif "purple" in style_lower or "magenta" in style_lower: + return MessageType.AGENT_REASONING + elif "dim" in style_lower: + return MessageType.SYSTEM + + # Infer from object type + if isinstance(content, Markdown): + return MessageType.AGENT_REASONING + elif isinstance(content, Table): + return MessageType.TOOL_OUTPUT + elif hasattr(content, "lexer_name"): # Syntax object + return MessageType.TOOL_OUTPUT + + return MessageType.INFO + + def _infer_message_type( + self, content: str, style: Optional[str] = None + ) -> MessageType: + """Infer message type from content and style.""" + if style: + style_lower = style.lower() + if "red" in style_lower or "error" in style_lower: + return MessageType.ERROR + elif "yellow" in style_lower or "warning" in style_lower: + return MessageType.WARNING + elif "green" in style_lower or "success" in style_lower: + return MessageType.SUCCESS + elif "blue" in style_lower: + return MessageType.INFO + elif "purple" in style_lower or "magenta" in style_lower: + return MessageType.AGENT_REASONING + elif "dim" in style_lower: + return MessageType.SYSTEM + + # Infer from content patterns + content_lower = content.lower() + if any(word in content_lower for word in ["error", "failed", "exception"]): + return MessageType.ERROR + elif any(word in content_lower for word in ["warning", "warn"]): + return MessageType.WARNING + elif any(word in content_lower for word in ["success", "completed", "done"]): + return MessageType.SUCCESS + elif any(word in content_lower for word in ["tool", "command", "running"]): + return MessageType.TOOL_OUTPUT + + return MessageType.INFO + + # Additional methods to maintain Rich Console compatibility + def rule(self, title: str = "", *, align: str = "center", style: str = "rule.line"): + """Print a horizontal rule.""" + self.queue.emit_simple( + MessageType.SYSTEM, + f"─── {title} ───" if title else "─" * 40, + rule=True, + style=style, + ) + + def status(self, status: str, *, spinner: 
str = "dots"): + """Show a status message (simplified).""" + self.queue.emit_simple( + MessageType.INFO, f"⏳ {status}", status=True, spinner=spinner + ) + + def input(self, prompt: str = "") -> str: + """Get user input without spinner interference. + + This method coordinates with the TUI to pause any running spinners + and properly display the user input prompt. + """ + # Set the global flag that we're awaiting user input + from code_puppy.tools.command_runner import set_awaiting_user_input + + set_awaiting_user_input(True) + + # Signal TUI to pause spinner and prepare for user input (legacy method) + try: + # Try to get the current TUI app instance and pause spinner + from textual.app import App + + current_app = App.get_running_app() + if hasattr(current_app, "pause_spinner_for_input"): + current_app.pause_spinner_for_input() + except Exception: + # If we can't pause the spinner (not in TUI mode), continue anyway + pass + + # Emit the prompt as a system message so it shows in the TUI chat + if prompt: + self.queue.emit_simple(MessageType.SYSTEM, prompt, requires_user_input=True) + + # Create a new, isolated console instance specifically for input + # This bypasses any spinner or queue system interference + input_console = Console(file=__import__("sys").stderr, force_terminal=True) + + # Clear any spinner artifacts and position cursor properly + if prompt: + input_console.print(prompt, end="", style="bold cyan") + + # Use regular input() which will read from stdin + # Since we printed the prompt to stderr, this should work cleanly + try: + user_response = input() + + # Show the user's response in the chat as well + if user_response: + self.queue.emit_simple( + MessageType.USER, f"User response: {user_response}" + ) + + return user_response + except (KeyboardInterrupt, EOFError): + # Handle interruption gracefully + input_console.print("\n[yellow]Input cancelled[/yellow]") + self.queue.emit_simple(MessageType.WARNING, "User input cancelled") + return "" + finally: + # Clear the global flag for awaiting user input + from code_puppy.tools.command_runner import set_awaiting_user_input + + set_awaiting_user_input(False) + + # Signal TUI to resume spinner if needed (legacy method) + try: + from textual.app import App + + current_app = App.get_running_app() + if hasattr(current_app, "resume_spinner_after_input"): + current_app.resume_spinner_after_input() + except Exception: + # If we can't resume the spinner, continue anyway + pass + + # File-like interface for compatibility + @property + def file(self): + """Get the current file (for compatibility).""" + return self.fallback_console.file + + @file.setter + def file(self, value): + """Set the current file (for compatibility).""" + self.fallback_console.file = value + + +def get_queue_console(queue: Optional[MessageQueue] = None) -> QueueConsole: + """Get a QueueConsole instance.""" + return QueueConsole(queue or get_global_queue()) diff --git a/code_puppy/messaging/renderers.py b/code_puppy/messaging/renderers.py new file mode 100644 index 00000000..9e822950 --- /dev/null +++ b/code_puppy/messaging/renderers.py @@ -0,0 +1,414 @@ +""" +Renderer implementations for different UI modes. + +These renderers consume messages from the queue and display them +appropriately for their respective interfaces. 
+""" + +import asyncio +import threading +from abc import ABC, abstractmethod +from io import StringIO +from typing import Optional + +from rich.console import Console +from rich.markdown import Markdown + +from .message_queue import MessageQueue, MessageType, UIMessage + + +class MessageRenderer(ABC): + """Base class for message renderers.""" + + def __init__(self, queue: MessageQueue): + self.queue = queue + self._running = False + self._task = None + + @abstractmethod + async def render_message(self, message: UIMessage): + """Render a single message.""" + pass + + async def start(self): + """Start the renderer.""" + if self._running: + return + + self._running = True + # Mark the queue as having an active renderer + self.queue.mark_renderer_active() + self._task = asyncio.create_task(self._consume_messages()) + + async def stop(self): + """Stop the renderer.""" + self._running = False + # Mark the queue as having no active renderer + self.queue.mark_renderer_inactive() + if self._task: + self._task.cancel() + try: + await self._task + except asyncio.CancelledError: + pass + + async def _consume_messages(self): + """Consume messages from the queue.""" + while self._running: + try: + message = await asyncio.wait_for(self.queue.get_async(), timeout=0.1) + await self.render_message(message) + except asyncio.TimeoutError: + continue + except asyncio.CancelledError: + break + except Exception as e: + # Log error but continue processing + print(f"Error rendering message: {e}") + + +class InteractiveRenderer(MessageRenderer): + """Renderer for interactive CLI mode using Rich console. + + Note: This async-based renderer is not currently used in the codebase. + Interactive mode currently uses SynchronousInteractiveRenderer instead. + A future refactoring might consolidate these renderers. + """ + + def __init__(self, queue: MessageQueue, console: Optional[Console] = None): + super().__init__(queue) + self.console = console or Console() + + async def render_message(self, message: UIMessage): + """Render a message using Rich console.""" + # Handle human input requests + if message.type == MessageType.HUMAN_INPUT_REQUEST: + await self._handle_human_input_request(message) + return + + # Convert message type to appropriate Rich styling + if message.type == MessageType.ERROR: + style = "bold red" + elif message.type == MessageType.WARNING: + style = "yellow" + elif message.type == MessageType.SUCCESS: + style = "green" + elif message.type == MessageType.TOOL_OUTPUT: + style = "blue" + elif message.type == MessageType.AGENT_REASONING: + style = None + elif message.type == MessageType.PLANNED_NEXT_STEPS: + style = None + elif message.type == MessageType.AGENT_RESPONSE: + # Special handling for agent responses - they'll be rendered as markdown + style = None + elif message.type == MessageType.SYSTEM: + style = None + else: + style = None + + # Render the content + if isinstance(message.content, str): + if message.type == MessageType.AGENT_RESPONSE: + # Render agent responses as markdown + try: + markdown = Markdown(message.content) + self.console.print(markdown) + except Exception: + # Fallback to plain text if markdown parsing fails + self.console.print(message.content) + elif style: + self.console.print(message.content, style=style) + else: + self.console.print(message.content) + else: + # For complex Rich objects (Tables, Markdown, Text, etc.) 
+ self.console.print(message.content) + + # Ensure output is immediately flushed to the terminal + # This fixes the issue where messages don't appear until user input + if hasattr(self.console.file, "flush"): + self.console.file.flush() + + async def _handle_human_input_request(self, message: UIMessage): + """Handle a human input request in async mode.""" + # This renderer is not currently used in practice, but if it were: + # We would need async input handling here + # For now, just render as a system message + self.console.print(f"[bold cyan]INPUT REQUESTED:[/bold cyan] {message.content}") + if hasattr(self.console.file, "flush"): + self.console.file.flush() + + +class TUIRenderer(MessageRenderer): + """Renderer for TUI mode that adds messages to the chat view.""" + + def __init__(self, queue: MessageQueue, tui_app=None): + super().__init__(queue) + self.tui_app = tui_app + + def set_tui_app(self, app): + """Set the TUI app reference.""" + self.tui_app = app + + async def render_message(self, message: UIMessage): + """Render a message in the TUI chat view.""" + if not self.tui_app: + return + + # Handle human input requests + if message.type == MessageType.HUMAN_INPUT_REQUEST: + await self._handle_human_input_request(message) + return + + # Extract group_id from message metadata (fixing the key name) + group_id = message.metadata.get("message_group") if message.metadata else None + + # For INFO messages with Rich objects (like Markdown), preserve them for proper rendering + if message.type == MessageType.INFO and hasattr( + message.content, "__rich_console__" + ): + # Pass the Rich object directly to maintain markdown formatting + self.tui_app.add_system_message_rich( + message.content, message_group=group_id + ) + return + + # Convert content to string for TUI display (for all other cases) + if hasattr(message.content, "__rich_console__"): + # For Rich objects, render to plain text using a Console + string_io = StringIO() + # Use markup=False to prevent interpretation of square brackets as markup + temp_console = Console( + file=string_io, width=80, legacy_windows=False, markup=False + ) + temp_console.print(message.content) + content_str = string_io.getvalue().rstrip("\n") + else: + content_str = str(message.content) + + # Map message types to TUI message types - ALL get group_id now + if message.type in (MessageType.ERROR,): + self.tui_app.add_error_message(content_str, message_group=group_id) + elif message.type in ( + MessageType.SYSTEM, + MessageType.INFO, + MessageType.WARNING, + MessageType.SUCCESS, + ): + self.tui_app.add_system_message(content_str, message_group=group_id) + elif message.type == MessageType.AGENT_REASONING: + # Agent reasoning messages should use the dedicated method + self.tui_app.add_agent_reasoning_message( + content_str, message_group=group_id + ) + elif message.type == MessageType.PLANNED_NEXT_STEPS: + # Agent reasoning messages should use the dedicated method + self.tui_app.add_planned_next_steps_message( + content_str, message_group=group_id + ) + elif message.type in ( + MessageType.TOOL_OUTPUT, + MessageType.COMMAND_OUTPUT, + MessageType.AGENT_RESPONSE, + ): + # These are typically agent/tool outputs + self.tui_app.add_agent_message(content_str, message_group=group_id) + else: + # Default to system message + self.tui_app.add_system_message(content_str, message_group=group_id) + + async def _handle_human_input_request(self, message: UIMessage): + """Handle a human input request in TUI mode.""" + try: + print("[DEBUG] TUI renderer handling human input 
request") + + # Check if tui_app is available + if not self.tui_app: + print("[DEBUG] No tui_app available, falling back to error response") + prompt_id = ( + message.metadata.get("prompt_id") if message.metadata else None + ) + if prompt_id: + from code_puppy.messaging import provide_prompt_response + + provide_prompt_response(prompt_id, "") + return + + prompt_id = message.metadata.get("prompt_id") if message.metadata else None + if not prompt_id: + print("[DEBUG] No prompt_id in message metadata") + self.tui_app.add_error_message("Error: Invalid human input request") + return + + # For now, use a simple fallback instead of modal to avoid crashes + print("[DEBUG] Using fallback approach - showing prompt as message") + self.tui_app.add_system_message( + f"[yellow]INPUT NEEDED:[/yellow] {str(message.content)}" + ) + self.tui_app.add_system_message( + "[dim]This would normally show a modal, but using fallback to prevent crashes[/dim]" + ) + + # Provide empty response for now to unblock the waiting thread + from code_puppy.messaging import provide_prompt_response + + provide_prompt_response(prompt_id, "") + + except Exception as e: + print(f"[DEBUG] Top-level exception in _handle_human_input_request: {e}") + import traceback + + traceback.print_exc() + # Last resort - provide empty response to prevent hanging + try: + prompt_id = ( + message.metadata.get("prompt_id") if message.metadata else None + ) + if prompt_id: + from code_puppy.messaging import provide_prompt_response + + provide_prompt_response(prompt_id, "") + except Exception: + pass # Can't do anything more + + +class SynchronousInteractiveRenderer: + """ + Synchronous renderer for interactive mode that doesn't require async. + + This is useful for cases where we want immediate rendering without + the overhead of async message processing. + + Note: As part of the messaging system refactoring, we're keeping this class for now + as it's essential for the interactive mode to function properly. Future refactoring + could replace this with a simpler implementation that leverages the unified message + queue system more effectively, or potentially convert interactive mode to use + async/await consistently and use InteractiveRenderer instead. 
+ + Current responsibilities: + - Consumes messages from the queue in a background thread + - Renders messages to the console in real-time without requiring async code + - Registers as a direct listener to the message queue for immediate processing + """ + + def __init__(self, queue: MessageQueue, console: Optional[Console] = None): + self.queue = queue + self.console = console or Console() + self._running = False + self._thread = None + + def start(self): + """Start the synchronous renderer in a background thread.""" + if self._running: + return + + self._running = True + # Mark the queue as having an active renderer + self.queue.mark_renderer_active() + # Add ourselves as a listener for immediate processing + self.queue.add_listener(self._render_message) + self._thread = threading.Thread(target=self._consume_messages, daemon=True) + self._thread.start() + + def stop(self): + """Stop the synchronous renderer.""" + self._running = False + # Mark the queue as having no active renderer + self.queue.mark_renderer_inactive() + # Remove ourselves as a listener + self.queue.remove_listener(self._render_message) + if self._thread and self._thread.is_alive(): + self._thread.join(timeout=1.0) + + def _consume_messages(self): + """Consume messages synchronously.""" + while self._running: + message = self.queue.get_nowait() + if message: + self._render_message(message) + else: + # No messages, sleep briefly + import time + + time.sleep(0.01) + + def _render_message(self, message: UIMessage): + """Render a message using Rich console.""" + # Handle human input requests + if message.type == MessageType.HUMAN_INPUT_REQUEST: + self._handle_human_input_request(message) + return + + # Convert message type to appropriate Rich styling + if message.type == MessageType.ERROR: + style = "bold red" + elif message.type == MessageType.WARNING: + style = "yellow" + elif message.type == MessageType.SUCCESS: + style = "green" + elif message.type == MessageType.TOOL_OUTPUT: + style = "blue" + elif message.type == MessageType.AGENT_REASONING: + style = None + elif message.type == MessageType.AGENT_RESPONSE: + # Special handling for agent responses - they'll be rendered as markdown + style = None + elif message.type == MessageType.SYSTEM: + style = None + else: + style = None + + # Render the content + if isinstance(message.content, str): + if message.type == MessageType.AGENT_RESPONSE: + # Render agent responses as markdown + try: + markdown = Markdown(message.content) + self.console.print(markdown) + except Exception: + # Fallback to plain text if markdown parsing fails + self.console.print(message.content) + elif style: + self.console.print(message.content, style=style) + else: + self.console.print(message.content) + else: + # For complex Rich objects (Tables, Markdown, Text, etc.) 
+ self.console.print(message.content) + + # Ensure output is immediately flushed to the terminal + # This fixes the issue where messages don't appear until user input + if hasattr(self.console.file, "flush"): + self.console.file.flush() + + def _handle_human_input_request(self, message: UIMessage): + """Handle a human input request in interactive mode.""" + prompt_id = message.metadata.get("prompt_id") if message.metadata else None + if not prompt_id: + self.console.print( + "[bold red]Error: Invalid human input request[/bold red]" + ) + return + + # Display the prompt + self.console.print(f"[bold cyan]{message.content}[/bold cyan]") + if hasattr(self.console.file, "flush"): + self.console.file.flush() + + # Get user input + try: + # Use basic input for now - could be enhanced with prompt_toolkit later + response = input(">>> ") + + # Provide the response back to the queue + from .message_queue import provide_prompt_response + + provide_prompt_response(prompt_id, response) + + except (EOFError, KeyboardInterrupt): + # Handle Ctrl+C or Ctrl+D + provide_prompt_response(prompt_id, "") + except Exception as e: + self.console.print(f"[bold red]Error getting input: {e}[/bold red]") + provide_prompt_response(prompt_id, "") diff --git a/code_puppy/messaging/spinner/__init__.py b/code_puppy/messaging/spinner/__init__.py new file mode 100644 index 00000000..ced2d05a --- /dev/null +++ b/code_puppy/messaging/spinner/__init__.py @@ -0,0 +1,67 @@ +""" +Shared spinner implementation for both TUI and CLI modes. + +This module provides consistent spinner animations across different UI modes. +""" + +from .console_spinner import ConsoleSpinner +from .spinner_base import SpinnerBase +from .textual_spinner import TextualSpinner + +# Keep track of all active spinners to manage them globally +_active_spinners = [] + + +def register_spinner(spinner): + """Register an active spinner to be managed globally.""" + if spinner not in _active_spinners: + _active_spinners.append(spinner) + + +def unregister_spinner(spinner): + """Remove a spinner from global management.""" + if spinner in _active_spinners: + _active_spinners.remove(spinner) + + +def pause_all_spinners(): + """Pause all active spinners.""" + for spinner in _active_spinners: + try: + spinner.pause() + except Exception: + # Ignore errors if a spinner can't be paused + pass + + +def resume_all_spinners(): + """Resume all active spinners.""" + for spinner in _active_spinners: + try: + spinner.resume() + except Exception: + # Ignore errors if a spinner can't be resumed + pass + + +def update_spinner_context(info: str) -> None: + """Update the shared context information displayed beside active spinners.""" + SpinnerBase.set_context_info(info) + + +def clear_spinner_context() -> None: + """Clear any context information displayed beside active spinners.""" + SpinnerBase.clear_context_info() + + +__all__ = [ + "SpinnerBase", + "TextualSpinner", + "ConsoleSpinner", + "register_spinner", + "unregister_spinner", + "pause_all_spinners", + "resume_all_spinners", + "update_spinner_context", + "clear_spinner_context", +] diff --git a/code_puppy/messaging/spinner/console_spinner.py b/code_puppy/messaging/spinner/console_spinner.py new file mode 100644 index 00000000..e06aa34c --- /dev/null +++ b/code_puppy/messaging/spinner/console_spinner.py @@ -0,0 +1,205 @@ +""" +Console spinner implementation for CLI mode using Rich's Live Display. 
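+
+Illustrative usage sketch (the work function below is hypothetical; the
+context-manager protocol is implemented by ConsoleSpinner itself):
+
+    from code_puppy.messaging.spinner import ConsoleSpinner
+
+    with ConsoleSpinner() as spinner:
+        run_agent_step()  # hypothetical long-running call; spinner animates until the block exits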
+""" + +import threading +import time + +from rich.console import Console +from rich.live import Live +from rich.text import Text + +from .spinner_base import SpinnerBase + + +class ConsoleSpinner(SpinnerBase): + """A console-based spinner implementation using Rich's Live Display.""" + + def __init__(self, console=None): + """Initialize the console spinner. + + Args: + console: Optional Rich console instance to use for output. + If not provided, a new one will be created. + """ + super().__init__() + self.console = console or Console() + self._thread = None + self._stop_event = threading.Event() + self._paused = False + self._live = None + + # Register this spinner for global management + from . import register_spinner + + register_spinner(self) + + def start(self): + """Start the spinner animation.""" + super().start() + self._stop_event.clear() + + # Don't start a new thread if one is already running + if self._thread and self._thread.is_alive(): + return + + # Create a Live display for the spinner + self._live = Live( + self._generate_spinner_panel(), + console=self.console, + refresh_per_second=20, + transient=True, + auto_refresh=False, # Don't auto-refresh to avoid wiping out user input + ) + self._live.start() + + # Start a thread to update the spinner frames + self._thread = threading.Thread(target=self._update_spinner) + self._thread.daemon = True + self._thread.start() + + def stop(self): + """Stop the spinner animation.""" + if not self._is_spinning: + return + + self._stop_event.set() + self._is_spinning = False + + if self._live: + self._live.stop() + self._live = None + + if self._thread and self._thread.is_alive(): + self._thread.join(timeout=0.5) + + self._thread = None + + # Unregister this spinner from global management + from . import unregister_spinner + + unregister_spinner(self) + + def update_frame(self): + """Update to the next frame.""" + super().update_frame() + + def _generate_spinner_panel(self): + """Generate a Rich panel containing the spinner text.""" + if self._paused: + return Text("") + + text = Text() + + # Check if we're awaiting user input to determine which message to show + from code_puppy.tools.command_runner import is_awaiting_user_input + + if is_awaiting_user_input(): + # Show waiting message when waiting for user input + text.append(SpinnerBase.WAITING_MESSAGE, style="bold cyan") + else: + # Show thinking message during normal processing + text.append(SpinnerBase.THINKING_MESSAGE, style="bold cyan") + + text.append(self.current_frame, style="bold cyan") + + context_info = SpinnerBase.get_context_info() + if context_info: + text.append(" ") + text.append(context_info, style="bold white") + + # Return a simple Text object instead of a Panel for a cleaner look + return text + + def _update_spinner(self): + """Update the spinner in a background thread.""" + try: + while not self._stop_event.is_set(): + # Update the frame + self.update_frame() + + # Check if we're awaiting user input before updating the display + from code_puppy.tools.command_runner import is_awaiting_user_input + + awaiting_input = is_awaiting_user_input() + + # Update the live display only if not paused and not awaiting input + if self._live and not self._paused and not awaiting_input: + # Manually refresh instead of auto-refresh to avoid wiping input + self._live.update(self._generate_spinner_panel()) + self._live.refresh() + + # Short sleep to control animation speed + time.sleep(0.05) + except Exception as e: + print(f"\nSpinner error: {e}") + self._is_spinning = False + + def 
pause(self): + """Pause the spinner animation.""" + if self._is_spinning: + self._paused = True + # Update the live display to hide the spinner immediately + if self._live: + try: + # When pausing, first update with the waiting message + # so it's visible briefly before disappearing + from code_puppy.tools.command_runner import is_awaiting_user_input + + if is_awaiting_user_input(): + text = Text() + text.append(SpinnerBase.WAITING_MESSAGE, style="bold cyan") + text.append(self.current_frame, style="bold cyan") + self._live.update(text) + self._live.refresh() + # Allow a moment for the waiting message to be visible + import time + + time.sleep(0.1) + + # Then clear the display + self._live.update(Text("")) + except Exception: + # If update fails, try stopping it completely + try: + self._live.stop() + except Exception: + pass + + def resume(self): + """Resume the spinner animation.""" + # Check if we should show a spinner - don't resume if waiting for user input + from code_puppy.tools.command_runner import is_awaiting_user_input + + if is_awaiting_user_input(): + return # Don't resume if waiting for user input + + if self._is_spinning and self._paused: + self._paused = False + # Force an immediate update to show the spinner again + if self._live: + try: + self._live.update(self._generate_spinner_panel()) + except Exception: + # If update fails, the live display might have been stopped + # Try to restart it + try: + self._live = Live( + self._generate_spinner_panel(), + console=self.console, + refresh_per_second=10, + transient=True, + auto_refresh=False, # Don't auto-refresh to avoid wiping out user input + ) + self._live.start() + except Exception: + pass + + def __enter__(self): + """Support for context manager.""" + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Clean up when exiting context manager.""" + self.stop() diff --git a/code_puppy/messaging/spinner/spinner_base.py b/code_puppy/messaging/spinner/spinner_base.py new file mode 100644 index 00000000..f5c1f528 --- /dev/null +++ b/code_puppy/messaging/spinner/spinner_base.py @@ -0,0 +1,97 @@ +""" +Base spinner implementation to be extended for different UI modes. +""" + +from abc import ABC, abstractmethod +from threading import Lock + +from code_puppy.config import get_puppy_name + + +class SpinnerBase(ABC): + """Abstract base class for spinner implementations.""" + + # Shared spinner frames across implementations + FRAMES = [ + "(🐶 ) ", + "( 🐶 ) ", + "( 🐶 ) ", + "( 🐶 ) ", + "( 🐶) ", + "( 🐶 ) ", + "( 🐶 ) ", + "( 🐶 ) ", + "(🐶 ) ", + ] + puppy_name = get_puppy_name().title() + + # Default message when processing + THINKING_MESSAGE = f"{puppy_name} is thinking... " + + # Message when waiting for user input + WAITING_MESSAGE = f"{puppy_name} is waiting... 
" + + # Current message - starts with thinking by default + MESSAGE = THINKING_MESSAGE + + _context_info: str = "" + _context_lock: Lock = Lock() + + def __init__(self): + """Initialize the spinner.""" + self._is_spinning = False + self._frame_index = 0 + + @abstractmethod + def start(self): + """Start the spinner animation.""" + self._is_spinning = True + self._frame_index = 0 + + @abstractmethod + def stop(self): + """Stop the spinner animation.""" + self._is_spinning = False + + @abstractmethod + def update_frame(self): + """Update to the next frame.""" + if self._is_spinning: + self._frame_index = (self._frame_index + 1) % len(self.FRAMES) + + @property + def current_frame(self): + """Get the current frame.""" + return self.FRAMES[self._frame_index] + + @property + def is_spinning(self): + """Check if the spinner is currently spinning.""" + return self._is_spinning + + @classmethod + def set_context_info(cls, info: str) -> None: + """Set shared context information displayed beside the spinner.""" + with cls._context_lock: + cls._context_info = info + + @classmethod + def clear_context_info(cls) -> None: + """Clear any context information displayed beside the spinner.""" + cls.set_context_info("") + + @classmethod + def get_context_info(cls) -> str: + """Return the current spinner context information.""" + with cls._context_lock: + return cls._context_info + + @staticmethod + def format_context_info(total_tokens: int, capacity: int, proportion: float) -> str: + """Create a concise context summary for spinner display.""" + if capacity <= 0: + return "" + proportion_pct = proportion * 100 + return ( + f"Tokens: {total_tokens:,}/{capacity:,} ({proportion_pct:.1f}% used)" + ) diff --git a/code_puppy/messaging/spinner/textual_spinner.py b/code_puppy/messaging/spinner/textual_spinner.py new file mode 100644 index 00000000..885a36de --- /dev/null +++ b/code_puppy/messaging/spinner/textual_spinner.py @@ -0,0 +1,106 @@ +""" +Textual spinner implementation for TUI mode. +""" + +from textual.widgets import Static + +from .spinner_base import SpinnerBase + + +class TextualSpinner(Static): + """A textual spinner widget based on the SimpleSpinnerWidget.""" + + # Use the frames from SpinnerBase + FRAMES = SpinnerBase.FRAMES + + def __init__(self, **kwargs): + """Initialize the textual spinner.""" + super().__init__("", **kwargs) + self._frame_index = 0 + self._is_spinning = False + self._timer = None + self._paused = False + self._previous_state = "" + + # Register this spinner for global management + from . import register_spinner + + register_spinner(self) + + def start_spinning(self): + """Start the spinner animation using Textual's timer system.""" + if not self._is_spinning: + self._is_spinning = True + self._frame_index = 0 + self.update_frame_display() + # Start the animation timer using Textual's timer system + self._timer = self.set_interval(0.10, self.update_frame_display) + + def stop_spinning(self): + """Stop the spinner animation.""" + self._is_spinning = False + if self._timer: + self._timer.stop() + self._timer = None + self.update("") + + # Unregister this spinner from global management + from . 
import unregister_spinner + + unregister_spinner(self) + + def update_frame(self): + """Update to the next frame.""" + if self._is_spinning: + self._frame_index = (self._frame_index + 1) % len(self.FRAMES) + + def update_frame_display(self): + """Update the display with the current frame.""" + if self._is_spinning: + self.update_frame() + current_frame = self.FRAMES[self._frame_index] + + # Check if we're awaiting user input to determine which message to show + from code_puppy.tools.command_runner import is_awaiting_user_input + + if is_awaiting_user_input(): + # Show waiting message when waiting for user input + message = SpinnerBase.WAITING_MESSAGE + else: + # Show thinking message during normal processing + message = SpinnerBase.THINKING_MESSAGE + + context_info = SpinnerBase.get_context_info() + context_segment = ( + f" [bold white]{context_info}[/bold white]" if context_info else "" + ) + + self.update( + f"[bold cyan]{message}[/bold cyan][bold cyan]{current_frame}[/bold cyan]{context_segment}" + ) + + def pause(self): + """Pause the spinner animation temporarily.""" + if self._is_spinning and self._timer and not self._paused: + self._paused = True + self._timer.pause() + # Store current state but don't clear it completely + self._previous_state = self.renderable + self.update("") + + def resume(self): + """Resume a paused spinner animation.""" + # Check if we should show a spinner - don't resume if waiting for user input + from code_puppy.tools.command_runner import is_awaiting_user_input + + if is_awaiting_user_input(): + return # Don't resume if waiting for user input + + if self._is_spinning and self._timer and self._paused: + self._paused = False + self._timer.resume() + # Restore previous state instead of immediately updating display + if self._previous_state: + self.update(self._previous_state) + else: + self.update_frame_display() diff --git a/code_puppy/model_factory.py b/code_puppy/model_factory.py index ed5bcffa..7683cd29 100644 --- a/code_puppy/model_factory.py +++ b/code_puppy/model_factory.py @@ -1,126 +1,136 @@ -import os import json -import asyncio -import time -from typing import Dict, Any -from pydantic_ai.models.gemini import GeminiModel -from pydantic_ai.models.openai import OpenAIModel -from pydantic_ai.providers.google_gla import GoogleGLAProvider -from pydantic_ai.providers.openai import OpenAIProvider +import logging +import os +import pathlib +from typing import Any, Dict + import httpx -from httpx import Response -import threading -from collections import deque +from anthropic import AsyncAnthropic +from openai import AsyncAzureOpenAI +from pydantic_ai.models.anthropic import AnthropicModel +from pydantic_ai.models.google import GoogleModel +from pydantic_ai.models.openai import OpenAIChatModel +from pydantic_ai.providers.anthropic import AnthropicProvider +from pydantic_ai.providers.cerebras import CerebrasProvider +from pydantic_ai.providers.google import GoogleProvider +from pydantic_ai.providers.openai import OpenAIProvider +from pydantic_ai.providers.openrouter import OpenRouterProvider + +from . import callbacks +from .config import EXTRA_MODELS_FILE +from .http_utils import create_async_client +from .round_robin_model import RoundRobinModel # Environment variables used in this module: # - GEMINI_API_KEY: API key for Google's Gemini models. Required when using Gemini models. # - OPENAI_API_KEY: API key for OpenAI models. Required when using OpenAI models or custom_openai endpoints. +# - TOGETHER_AI_KEY: API key for Together AI models. 
Required when using Together AI models. # # When using custom endpoints (type: "custom_openai" in models.json): # - Environment variables can be referenced in header values by prefixing with $ in models.json. # Example: "X-Api-Key": "$OPENAI_API_KEY" will use the value from os.environ.get("OPENAI_API_KEY") -def make_client( - max_requests_per_minute: int = 10, max_retries: int = 3, retry_base_delay: int = 10 -) -> httpx.AsyncClient: - # Create a rate limiter using a token bucket approach - class RateLimiter: - def __init__(self, max_requests_per_minute): - self.max_requests_per_minute = max_requests_per_minute - self.interval = ( - 60.0 / max_requests_per_minute - ) # Time between requests in seconds - self.request_times = deque(maxlen=max_requests_per_minute) - self.lock = threading.Lock() - - async def acquire(self): - """Wait until a request can be made according to the rate limit.""" - while True: - with self.lock: - now = time.time() - - # Remove timestamps older than 1 minute - while self.request_times and now - self.request_times[0] > 60: - self.request_times.popleft() - - # If we haven't reached the limit, add the timestamp and proceed - if len(self.request_times) < self.max_requests_per_minute: - self.request_times.append(now) - return - - # Otherwise, calculate the wait time until we can make another request - oldest = self.request_times[0] - wait_time = max(0, oldest + 60 - now) - - if wait_time > 0: - print( - f"Rate limit would be exceeded. Waiting {wait_time:.2f} seconds before sending request." - ) - await asyncio.sleep(wait_time) - else: - # Try again immediately - continue - - # Create the rate limiter instance - rate_limiter = RateLimiter(max_requests_per_minute) - - def should_retry(response: Response) -> bool: - return response.status_code == 429 or (500 <= response.status_code < 600) +class ZaiChatModel(OpenAIChatModel): + def _process_response(self, response): + response.object = 'chat.completion' + return super()._process_response(response) - async def request_hook(request): - # Wait until we can make a request according to our rate limit - await rate_limiter.acquire() - return request - async def response_hook(response: Response) -> Response: - retries = getattr(response.request, "_retries", 0) +def get_custom_config(model_config): + custom_config = model_config.get("custom_endpoint", {}) + if not custom_config: + raise ValueError("Custom model requires 'custom_endpoint' configuration") - if should_retry(response) and retries < max_retries: - setattr(response.request, "_retries", retries + 1) + url = custom_config.get("url") + if not url: + raise ValueError("Custom endpoint requires 'url' field") - delay = retry_base_delay * (2**retries) - - if response.status_code == 429: - print( - f"Rate limit exceeded. Retrying in {delay:.2f} seconds (attempt {retries + 1}/{max_retries})" + headers = {} + for key, value in custom_config.get("headers", {}).items(): + if value.startswith("$"): + env_var_name = value[1:] + resolved_value = os.environ.get(env_var_name) + if resolved_value is None: + raise ValueError( + f"Environment variable '{env_var_name}' is required for custom endpoint headers but is not set. " + f"Please set the environment variable: export {env_var_name}=your_value" ) - else: - print( - f"Server error {response.status_code}. 
Retrying in {delay:.2f} seconds (attempt {retries + 1}/{max_retries})" - ) - - await asyncio.sleep(delay) - - new_request = response.request.copy() - async with httpx.AsyncClient() as client: - # Apply rate limiting to the retry request as well - await rate_limiter.acquire() - new_response = await client.request( - new_request.method, - str(new_request.url), - headers=new_request.headers, - content=new_request.content, - params=dict(new_request.url.params), + value = resolved_value + elif "$" in value: + tokens = value.split(" ") + resolved_values = [] + for token in tokens: + if token.startswith("$"): + env_var = token[1:] + resolved_value = os.environ.get(env_var) + if resolved_value is None: + raise ValueError( + f"Environment variable '{env_var}' is required for custom endpoint headers but is not set. " + f"Please set the environment variable: export {env_var}=your_value" + ) + resolved_values.append(resolved_value) + else: + resolved_values.append(token) + value = " ".join(resolved_values) + headers[key] = value + api_key = None + if "api_key" in custom_config: + if custom_config["api_key"].startswith("$"): + env_var_name = custom_config["api_key"][1:] + api_key = os.environ.get(env_var_name) + if api_key is None: + raise ValueError( + f"Environment variable '{env_var_name}' is required for custom endpoint API key but is not set. " + f"Please set the environment variable: export {env_var_name}=your_value" ) - return new_response - return response - - # Setup both request and response hooks - event_hooks = {"request": [request_hook], "response": [response_hook]} - - client = httpx.AsyncClient(event_hooks=event_hooks) - return client + else: + api_key = custom_config["api_key"] + if "ca_certs_path" in custom_config: + verify = custom_config["ca_certs_path"] + else: + verify = None + return url, headers, verify, api_key class ModelFactory: """A factory for creating and managing different AI models.""" @staticmethod - def load_config(config_path: str) -> Dict[str, Any]: - """Loads model configurations from a JSON file.""" - with open(config_path, "r") as f: - return json.load(f) + def load_config() -> Dict[str, Any]: + load_model_config_callbacks = callbacks.get_callbacks("load_model_config") + if len(load_model_config_callbacks) > 0: + if len(load_model_config_callbacks) > 1: + logging.getLogger(__name__).warning( + "Multiple load_model_config callbacks registered, using the first" + ) + config = callbacks.on_load_model_config()[0] + else: + from code_puppy.config import MODELS_FILE + + with open(pathlib.Path(__file__).parent / "models.json", "r") as src: + with open(pathlib.Path(MODELS_FILE), "w") as target: + target.write(src.read()) + + with open(MODELS_FILE, "r") as f: + config = json.load(f) + + if pathlib.Path(EXTRA_MODELS_FILE).exists(): + try: + with open(EXTRA_MODELS_FILE, "r") as f: + extra_config = json.load(f) + config.update(extra_config) + except json.JSONDecodeError as e: + logging.getLogger(__name__).warning( + f"Failed to load extra models config from {EXTRA_MODELS_FILE}: Invalid JSON - {e}\n" + f"Please check your extra_models.json file for syntax errors." + ) + except Exception as e: + logging.getLogger(__name__).warning( + f"Failed to load extra models config from {EXTRA_MODELS_FILE}: {e}\n" + f"The extra models configuration will be ignored." 
+ ) + return config @staticmethod def get_model(model_name: str, config: Dict[str, Any]) -> Any: @@ -131,55 +141,211 @@ def get_model(model_name: str, config: Dict[str, Any]) -> Any: model_type = model_config.get("type") - # Common configuration for rate limiting and retries - max_requests_per_minute = model_config.get("max_requests_per_minute", 100) - max_retries = model_config.get("max_retries", 3) - retry_base_delay = model_config.get("retry_base_delay", 1.0) + if model_type == "gemini": + provider = GoogleProvider(api_key=os.environ.get("GEMINI_API_KEY", "")) - client = make_client( - max_requests_per_minute=max_requests_per_minute, - max_retries=max_retries, - retry_base_delay=retry_base_delay, - ) + model = GoogleModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model - if model_type == "gemini": - provider = GoogleGLAProvider( - api_key=os.environ.get("GEMINI_API_KEY", "") + elif model_type == "openai": + provider = OpenAIProvider(api_key=os.environ.get("OPENAI_API_KEY", "")) + + model = OpenAIChatModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model + + elif model_type == "anthropic": + api_key = os.environ.get("ANTHROPIC_API_KEY", None) + if not api_key: + raise ValueError( + "ANTHROPIC_API_KEY environment variable must be set for Anthropic models." + ) + anthropic_client = AsyncAnthropic(api_key=api_key) + provider = AnthropicProvider(anthropic_client=anthropic_client) + return AnthropicModel(model_name=model_config["name"], provider=provider) + + elif model_type == "custom_anthropic": + url, headers, verify, api_key = get_custom_config(model_config) + client = create_async_client(headers=headers, verify=verify) + anthropic_client = AsyncAnthropic( + base_url=url, + http_client=client, + api_key=api_key, ) + provider = AnthropicProvider(anthropic_client=anthropic_client) + return AnthropicModel(model_name=model_config["name"], provider=provider) + + elif model_type == "azure_openai": + azure_endpoint_config = model_config.get("azure_endpoint") + if not azure_endpoint_config: + raise ValueError( + "Azure OpenAI model type requires 'azure_endpoint' in its configuration." + ) + azure_endpoint = azure_endpoint_config + if azure_endpoint_config.startswith("$"): + azure_endpoint = os.environ.get(azure_endpoint_config[1:]) + if not azure_endpoint: + raise ValueError( + f"Azure OpenAI endpoint environment variable '{azure_endpoint_config[1:] if azure_endpoint_config.startswith('$') else ''}' not found or is empty." + ) - return GeminiModel(model_name=model_config["name"], provider=provider) + api_version_config = model_config.get("api_version") + if not api_version_config: + raise ValueError( + "Azure OpenAI model type requires 'api_version' in its configuration." + ) + api_version = api_version_config + if api_version_config.startswith("$"): + api_version = os.environ.get(api_version_config[1:]) + if not api_version: + raise ValueError( + f"Azure OpenAI API version environment variable '{api_version_config[1:] if api_version_config.startswith('$') else ''}' not found or is empty." + ) - elif model_type == "openai": - provider = OpenAIProvider( - api_key=os.environ.get("OPENAI_API_KEY", "") + api_key_config = model_config.get("api_key") + if not api_key_config: + raise ValueError( + "Azure OpenAI model type requires 'api_key' in its configuration." 
+ ) + api_key = api_key_config + if api_key_config.startswith("$"): + api_key = os.environ.get(api_key_config[1:]) + if not api_key: + raise ValueError( + f"Azure OpenAI API key environment variable '{api_key_config[1:] if api_key_config.startswith('$') else ''}' not found or is empty." + ) + + # Configure max_retries for the Azure client, defaulting if not specified in config + azure_max_retries = model_config.get("max_retries", 2) + + azure_client = AsyncAzureOpenAI( + azure_endpoint=azure_endpoint, + api_version=api_version, + api_key=api_key, + max_retries=azure_max_retries, ) + provider = OpenAIProvider(openai_client=azure_client) + model = OpenAIChatModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model - return OpenAIModel(model_name=model_config["name"], provider=provider) - elif model_type == "custom_openai": - custom_config = model_config.get("custom_endpoint", {}) - if not custom_config: - raise ValueError("Custom model requires 'custom_endpoint' configuration") - - url = custom_config.get("url") - if not url: - raise ValueError("Custom endpoint requires 'url' field") - - headers = {} - for key, value in custom_config.get("headers", {}).items(): - headers[key] = value - - if "ca_certs_path" in custom_config: - ca_certs_path = custom_config.get("ca_certs_path") - - client = httpx.AsyncClient(headers=headers, verify=ca_certs_path) - - provider = OpenAIProvider( + url, headers, verify, api_key = get_custom_config(model_config) + client = create_async_client(headers=headers, verify=verify) + provider_args = dict( base_url=url, http_client=client, ) - - return OpenAIModel(model_name=model_config["name"], provider=provider) + if api_key: + provider_args["api_key"] = api_key + provider = OpenAIProvider(**provider_args) + + model = OpenAIChatModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model + elif model_type == "zai_coding": + zai_model = ZaiChatModel( + model_name=model_config["name"], + provider=OpenAIProvider( + api_key=os.getenv('ZAI_API_KEY'), + base_url='https://api.z.ai/api/coding/paas/v4' + ) + ) + return zai_model + elif model_type == "zai_api": + zai_model = ZaiChatModel( + model_name=model_config["name"], + provider=OpenAIProvider( + api_key=os.getenv('ZAI_API_KEY'), + base_url='https://api.z.ai/api/paas/v4/' + ) + ) + return zai_model + elif model_type == "custom_gemini": + url, headers, verify, api_key = get_custom_config(model_config) + os.environ["GEMINI_API_KEY"] = api_key + + class CustomGoogleGLAProvider(GoogleProvider): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + @property + def base_url(self): + return url + + @property + def client(self) -> httpx.AsyncClient: + _client = create_async_client(headers=headers, verify=verify) + _client.base_url = self.base_url + return _client + + google_gla = CustomGoogleGLAProvider(api_key=api_key) + model = GoogleModel(model_name=model_config["name"], provider=google_gla) + return model + elif model_type == "cerebras": + url, headers, verify, api_key = get_custom_config(model_config) + client = create_async_client(headers=headers, verify=verify) + provider_args = dict( + api_key=api_key, + http_client=client, + ) + if api_key: + provider_args["api_key"] = api_key + provider = CerebrasProvider(**provider_args) + + model = OpenAIChatModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model + + elif model_type == "openrouter": + # 
Get API key from config, which can be an environment variable reference or raw value + api_key_config = model_config.get("api_key") + api_key = None + + if api_key_config: + if api_key_config.startswith("$"): + # It's an environment variable reference + env_var_name = api_key_config[1:] # Remove the $ prefix + api_key = os.environ.get(env_var_name) + if api_key is None: + raise ValueError( + f"OpenRouter API key environment variable '{env_var_name}' not found or is empty. " + f"Please set the environment variable: export {env_var_name}=your_value" + ) + else: + # It's a raw API key value + api_key = api_key_config + else: + # No API key in config, try to get it from the default environment variable + api_key = os.environ.get("OPENROUTER_API_KEY") + + provider = OpenRouterProvider(api_key=api_key) + + model = OpenAIChatModel(model_name=model_config["name"], provider=provider) + setattr(model, "provider", provider) + return model + + elif model_type == "round_robin": + # Get the list of model names to use in the round-robin + model_names = model_config.get("models") + if not model_names or not isinstance(model_names, list): + raise ValueError( + f"Round-robin model '{model_name}' requires a 'models' list in its configuration." + ) + + # Get the rotate_every parameter (default: 1) + rotate_every = model_config.get("rotate_every", 1) + + # Resolve each model name to an actual model instance + models = [] + for name in model_names: + # Recursively get each model using the factory + model = ModelFactory.get_model(name, config) + models.append(model) + + # Create and return the round-robin model + return RoundRobinModel(*models, rotate_every=rotate_every) else: raise ValueError(f"Unsupported model type: {model_type}") diff --git a/code_puppy/models.json b/code_puppy/models.json index a74dfa9a..dfedbc8d 100644 --- a/code_puppy/models.json +++ b/code_puppy/models.json @@ -1,72 +1,71 @@ { - "gemini-2.5-flash-preview-05-20": { - "type": "gemini", - "name": "gemini-2.5-flash-preview-05-20", - "max_requests_per_minute": 10, - "max_retries": 3, - "retry_base_delay": 10 + "gpt-5": { + "type": "openai", + "name": "gpt-5", + "context_length": 400000 }, - "gemini-2.0-flash": { - "type": "gemini", - "name": "gemini-2.0-flash", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "Cerebras-Qwen3-Coder-480b": { + "type": "cerebras", + "name": "qwen-3-coder-480b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY" + }, + "context_length": 131072 }, - "gpt-4o": { - "type": "openai", - "name": "gpt-4o", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "Cerebras-Qwen3-235b-a22b-instruct-2507": { + "type": "cerebras", + "name": "qwen-3-235b-a22b-instruct-2507", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY" + }, + "context_length": 64000 }, - "gpt-4o-mini": { - "type": "openai", - "name": "gpt-4o-mini", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "Cerebras-gpt-oss-120b": { + "type": "cerebras", + "name": "gpt-oss-120b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": "$CEREBRAS_API_KEY" + }, + "context_length": 131072 }, - "gpt-4.1": { - "type": "openai", - "name": "gpt-4.1", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "Cerebras-Qwen-3-32b": { + "type": "cerebras", + "name": "qwen-3-32b", + "custom_endpoint": { + "url": "https://api.cerebras.ai/v1", + "api_key": 
"$CEREBRAS_API_KEY" + }, + "context_length": 65536 }, - "gpt-4.1-mini": { - "type": "openai", - "name": "gpt-4.1-mini", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "claude-4-0-sonnet": { + "type": "anthropic", + "name": "claude-sonnet-4-20250514", + "context_length": 200000 }, - "gpt-4.1-nano": { - "type": "openai", - "name": "gpt-4.1-nano", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "claude-4-5-sonnet": { + "type": "anthropic", + "name": "claude-sonnet-4-5-20250929", + "context_length": 200000 }, - "o3-mini": { - "type": "openai", - "name": "o3-mini", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10 + "glm-4.5-coding": { + "type": "zai_coding", + "name": "glm-4.5" }, - "gpt-4o-custom": { - "type": "custom_openai", - "name": "gpt-4o", - "max_requests_per_minute": 100, - "max_retries": 3, - "retry_base_delay": 10, - "custom_endpoint": { - "url": "https://my.cute.endpoint:8080", - "headers": { - "X-Api-Key": "$OPENAI_API_KEY" - }, - "ca_certs_path": "/path/to/cert.pem" - } + "glm-4.6-coding": { + "type": "zai_coding", + "name": "glm-4.6", + "context_length": 200000 + }, + "glm-4.5": { + "type": "zai_api", + "name": "glm-4.5" + }, + "glm-4.6": { + "type": "zai_api", + "name": "glm-4.6", + "context_length": 200000 } -} \ No newline at end of file +} diff --git a/code_puppy/plugins/__init__.py b/code_puppy/plugins/__init__.py new file mode 100644 index 00000000..4b39f436 --- /dev/null +++ b/code_puppy/plugins/__init__.py @@ -0,0 +1,32 @@ +import importlib +import logging +from pathlib import Path + +logger = logging.getLogger(__name__) + + +def load_plugin_callbacks(): + """Dynamically load register_callbacks.py from all plugin submodules.""" + plugins_dir = Path(__file__).parent + + # Iterate through all subdirectories in the plugins folder + for item in plugins_dir.iterdir(): + if item.is_dir() and not item.name.startswith("_"): + plugin_name = item.name + callbacks_file = item / "register_callbacks.py" + + if callbacks_file.exists(): + try: + # Import the register_callbacks module dynamically + module_name = f"code_puppy.plugins.{plugin_name}.register_callbacks" + logger.debug(f"Loading plugin callbacks from {module_name}") + importlib.import_module(module_name) + logger.info( + f"Successfully loaded callbacks from plugin: {plugin_name}" + ) + except ImportError as e: + logger.warning( + f"Failed to import callbacks from plugin {plugin_name}: {e}" + ) + except Exception as e: + logger.error(f"Unexpected error loading plugin {plugin_name}: {e}") diff --git a/code_puppy/plugins/example_custom_command/register_callbacks.py b/code_puppy/plugins/example_custom_command/register_callbacks.py new file mode 100644 index 00000000..9b44bfe9 --- /dev/null +++ b/code_puppy/plugins/example_custom_command/register_callbacks.py @@ -0,0 +1,51 @@ +from code_puppy.callbacks import register_callback +from code_puppy.messaging import emit_info + + +def _custom_help(): + return [ + ("woof", "Emit a playful woof message (no model)"), + ("echo", "Echo back your text (display only)"), + ] + + +def _handle_custom_command(command: str, name: str): + """Handle a demo custom command. + + Policy: custom commands must NOT invoke the model. They should emit + messages or return True to indicate handling. Returning a string is + treated as a display-only message by the command handler. 
+ + Supports: + - /woof → emits a fun message and returns True + - /echo → emits the text (display-only) + """ + if not name: + return None + + if name == "woof": + # If extra text is provided, pass it as a prompt; otherwise, send a fun default + parts = command.split(maxsplit=1) + if len(parts) == 2: + text = parts[1] + emit_info(f"🐶 Woof! sending prompt: {text}") + return text + emit_info("🐶 Woof! sending prompt: Tell me a dog fact") + return "Tell me a dog fact" + + if name == "echo": + # Return the rest of the command (after the name) to be treated as input + # Example: "/echo Hello" → returns "Hello" + rest = command.split(maxsplit=1) + if len(rest) == 2: + text = rest[1] + emit_info(f"[dim]example plugin echo ->[/dim] {text}") + return text + emit_info("[dim]example plugin echo (empty)[/dim]") + return "" + + return None + + +register_callback("custom_command_help", _custom_help) +register_callback("custom_command", _handle_custom_command) diff --git a/code_puppy/reopenable_async_client.py b/code_puppy/reopenable_async_client.py new file mode 100644 index 00000000..e9237dcd --- /dev/null +++ b/code_puppy/reopenable_async_client.py @@ -0,0 +1,225 @@ +""" +ReopenableAsyncClient - A reopenable httpx.AsyncClient wrapper. + +This module provides a ReopenableAsyncClient class that extends httpx.AsyncClient +to support reopening after being closed, which the standard httpx.AsyncClient +doesn't support. +""" + +from typing import Optional, Union + +import httpx + + +class ReopenableAsyncClient: + """ + A wrapper around httpx.AsyncClient that can be reopened after being closed. + + Standard httpx.AsyncClient becomes unusable after calling aclose(). + This class allows you to reopen the client and continue using it. + + Example: + >>> client = ReopenableAsyncClient(timeout=30.0) + >>> await client.get("https://httpbin.org/get") + >>> await client.aclose() + >>> # Client is now closed, but can be reopened + >>> await client.reopen() + >>> await client.get("https://httpbin.org/get") # Works! + + The client preserves all original configuration when reopening. + """ + + class _StreamWrapper: + """Async context manager wrapper for streaming responses.""" + + def __init__( + self, + parent_client: "ReopenableAsyncClient", + method: str, + url: Union[str, httpx.URL], + **kwargs, + ): + self.parent_client = parent_client + self.method = method + self.url = url + self.kwargs = kwargs + self._stream_context = None + + async def __aenter__(self): + client = await self.parent_client._ensure_client_open() + self._stream_context = client.stream(self.method, self.url, **self.kwargs) + return await self._stream_context.__aenter__() + + async def __aexit__(self, exc_type, exc_val, exc_tb): + if self._stream_context: + return await self._stream_context.__aexit__(exc_type, exc_val, exc_tb) + + def __init__(self, **kwargs): + """ + Initialize the ReopenableAsyncClient. + + Args: + **kwargs: All arguments that would be passed to httpx.AsyncClient() + """ + self._client_kwargs = kwargs.copy() + self._client: Optional[httpx.AsyncClient] = None + self._is_closed = True + + async def _ensure_client_open(self) -> httpx.AsyncClient: + """ + Ensure the underlying client is open and ready to use. 
+ + Returns: + The active httpx.AsyncClient instance + + Raises: + RuntimeError: If client cannot be opened + """ + if self._is_closed or self._client is None: + await self._create_client() + return self._client + + async def _create_client(self) -> None: + """Create a new httpx.AsyncClient with the stored configuration.""" + if self._client is not None and not self._is_closed: + # Close existing client first + await self._client.aclose() + + self._client = httpx.AsyncClient(**self._client_kwargs) + self._is_closed = False + + async def reopen(self) -> None: + """ + Explicitly reopen the client after it has been closed. + + This is useful when you want to reuse a client that was previously closed. + """ + await self._create_client() + + async def aclose(self) -> None: + """ + Close the underlying httpx.AsyncClient. + + After calling this, the client can be reopened using reopen() or + automatically when making the next request. + """ + if self._client is not None and not self._is_closed: + await self._client.aclose() + self._is_closed = True + + @property + def is_closed(self) -> bool: + """Check if the client is currently closed.""" + return self._is_closed or self._client is None + + # Delegate all httpx.AsyncClient methods to the underlying client + + async def get(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a GET request.""" + client = await self._ensure_client_open() + return await client.get(url, **kwargs) + + async def post(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a POST request.""" + client = await self._ensure_client_open() + return await client.post(url, **kwargs) + + async def put(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a PUT request.""" + client = await self._ensure_client_open() + return await client.put(url, **kwargs) + + async def patch(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a PATCH request.""" + client = await self._ensure_client_open() + return await client.patch(url, **kwargs) + + async def delete(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a DELETE request.""" + client = await self._ensure_client_open() + return await client.delete(url, **kwargs) + + async def head(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make a HEAD request.""" + client = await self._ensure_client_open() + return await client.head(url, **kwargs) + + async def options(self, url: Union[str, httpx.URL], **kwargs) -> httpx.Response: + """Make an OPTIONS request.""" + client = await self._ensure_client_open() + return await client.options(url, **kwargs) + + async def request( + self, method: str, url: Union[str, httpx.URL], **kwargs + ) -> httpx.Response: + """Make a request with the specified HTTP method.""" + client = await self._ensure_client_open() + return await client.request(method, url, **kwargs) + + async def send(self, request: httpx.Request, **kwargs) -> httpx.Response: + """Send a pre-built request.""" + client = await self._ensure_client_open() + return await client.send(request, **kwargs) + + def build_request( + self, method: str, url: Union[str, httpx.URL], **kwargs + ) -> httpx.Request: + """ + Build a request without sending it. + + Note: This creates a temporary client if none exists, but doesn't keep it open. 
+ """ + if self._client is None or self._is_closed: + # Create a temporary client just for building the request + temp_client = httpx.AsyncClient(**self._client_kwargs) + try: + request = temp_client.build_request(method, url, **kwargs) + return request + finally: + # Clean up the temporary client synchronously if possible + # Note: This might leave a connection open, but it's better than + # making this method async just for building requests + pass + return self._client.build_request(method, url, **kwargs) + + def stream(self, method: str, url: Union[str, httpx.URL], **kwargs): + """Stream a request. Returns an async context manager.""" + return self._StreamWrapper(self, method, url, **kwargs) + + # Context manager support + async def __aenter__(self): + """Async context manager entry.""" + await self._ensure_client_open() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Async context manager exit.""" + await self.aclose() + + # Properties that don't require an active client + @property + def timeout(self) -> Optional[httpx.Timeout]: + """Get the configured timeout.""" + return self._client_kwargs.get("timeout") + + @property + def headers(self) -> httpx.Headers: + """Get the configured headers.""" + if self._client is not None: + return self._client.headers + # Return headers from kwargs if client doesn't exist + headers = self._client_kwargs.get("headers", {}) + return httpx.Headers(headers) + + @property + def cookies(self) -> httpx.Cookies: + """Get the current cookies.""" + if self._client is not None and not self._is_closed: + return self._client.cookies + # Return empty cookies if client doesn't exist or is closed + return httpx.Cookies() + + def __repr__(self) -> str: + """String representation of the client.""" + status = "closed" if self.is_closed else "open" + return f"" diff --git a/code_puppy/round_robin_model.py b/code_puppy/round_robin_model.py new file mode 100644 index 00000000..7eef0c93 --- /dev/null +++ b/code_puppy/round_robin_model.py @@ -0,0 +1,149 @@ +from contextlib import asynccontextmanager, suppress +from dataclasses import dataclass, field +from typing import Any, AsyncIterator, List + +from pydantic_ai.models import ( + Model, + ModelMessage, + ModelRequestParameters, + ModelResponse, + ModelSettings, + StreamedResponse, +) +from pydantic_ai.models.fallback import merge_model_settings +from pydantic_ai.result import RunContext + +try: + from opentelemetry.context import get_current_span +except ImportError: + # If opentelemetry is not installed, provide a dummy implementation + def get_current_span(): + class DummySpan: + def is_recording(self): + return False + + def set_attributes(self, attributes): + pass + + return DummySpan() + + +@dataclass(init=False) +class RoundRobinModel(Model): + """A model that cycles through multiple models in a round-robin fashion. + + This model distributes requests across multiple candidate models to help + overcome rate limits or distribute load. + """ + + models: List[Model] + _current_index: int = field(default=0, repr=False) + _model_name: str = field(repr=False) + _rotate_every: int = field(default=1, repr=False) + _request_count: int = field(default=0, repr=False) + + def __init__( + self, + *models: Model, + rotate_every: int = 1, + settings: ModelSettings | None = None, + ): + """Initialize a round-robin model instance. + + Args: + models: The model instances to cycle through. + rotate_every: Number of requests before rotating to the next model (default: 1). 
+ settings: Model settings that will be used as defaults for this model. + """ + super().__init__(settings=settings) + if not models: + raise ValueError("At least one model must be provided") + if rotate_every < 1: + raise ValueError("rotate_every must be at least 1") + self.models = list(models) + self._current_index = 0 + self._request_count = 0 + self._rotate_every = rotate_every + + @property + def model_name(self) -> str: + """The model name showing this is a round-robin model with its candidates.""" + base_name = f"round_robin:{','.join(model.model_name for model in self.models)}" + if self._rotate_every != 1: + return f"{base_name}:rotate_every={self._rotate_every}" + return base_name + + @property + def system(self) -> str: + """System prompt from the current model.""" + return self.models[self._current_index].system + + @property + def base_url(self) -> str | None: + """Base URL from the current model.""" + return self.models[self._current_index].base_url + + def _get_next_model(self) -> Model: + """Get the next model in the round-robin sequence and update the index.""" + model = self.models[self._current_index] + self._request_count += 1 + if self._request_count >= self._rotate_every: + self._current_index = (self._current_index + 1) % len(self.models) + self._request_count = 0 + return model + + async def request( + self, + messages: list[ModelMessage], + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + ) -> ModelResponse: + """Make a request using the next model in the round-robin sequence.""" + current_model = self._get_next_model() + # Use the current model's settings as base, then merge with provided settings + merged_settings = merge_model_settings(current_model.settings, model_settings) + customized_model_request_parameters = ( + current_model.customize_request_parameters(model_request_parameters) + ) + + try: + response = await current_model.request( + messages, merged_settings, customized_model_request_parameters + ) + self._set_span_attributes(current_model) + return response + except Exception as exc: + # Unlike FallbackModel, we don't try other models here + # The round-robin strategy is about distribution, not failover + raise exc + + @asynccontextmanager + async def request_stream( + self, + messages: list[ModelMessage], + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + run_context: RunContext[Any] | None = None, + ) -> AsyncIterator[StreamedResponse]: + """Make a streaming request using the next model in the round-robin sequence.""" + current_model = self._get_next_model() + # Use the current model's settings as base, then merge with provided settings + merged_settings = merge_model_settings(current_model.settings, model_settings) + customized_model_request_parameters = ( + current_model.customize_request_parameters(model_request_parameters) + ) + + async with current_model.request_stream( + messages, merged_settings, customized_model_request_parameters, run_context + ) as response: + self._set_span_attributes(current_model) + yield response + + def _set_span_attributes(self, model: Model): + """Set span attributes for observability.""" + with suppress(Exception): + span = get_current_span() + if span.is_recording(): + attributes = getattr(span, "attributes", {}) + if attributes.get("gen_ai.request.model") == self.model_name: + span.set_attributes(model.model_attributes(model)) diff --git a/code_puppy/session_storage.py b/code_puppy/session_storage.py new file mode 100644 index 
00000000..56505491 --- /dev/null +++ b/code_puppy/session_storage.py @@ -0,0 +1,289 @@ +"""Shared helpers for persisting and restoring chat sessions. + +This module centralises the pickle + metadata handling that used to live in +both the CLI command handler and the auto-save feature. Keeping it here helps +us avoid duplication while staying inside the Zen-of-Python sweet spot: simple +is better than complex, nested side effects are worse than deliberate helpers. +""" + +from __future__ import annotations + +import json +import pickle +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Callable, List + +SessionHistory = List[Any] +TokenEstimator = Callable[[Any], int] + + +@dataclass(slots=True) +class SessionPaths: + pickle_path: Path + metadata_path: Path + + +@dataclass(slots=True) +class SessionMetadata: + session_name: str + timestamp: str + message_count: int + total_tokens: int + pickle_path: Path + metadata_path: Path + auto_saved: bool = False + + def as_serialisable(self) -> dict[str, Any]: + return { + "session_name": self.session_name, + "timestamp": self.timestamp, + "message_count": self.message_count, + "total_tokens": self.total_tokens, + "file_path": str(self.pickle_path), + "auto_saved": self.auto_saved, + } + + +def ensure_directory(path: Path) -> Path: + path.mkdir(parents=True, exist_ok=True) + return path + + +def build_session_paths(base_dir: Path, session_name: str) -> SessionPaths: + pickle_path = base_dir / f"{session_name}.pkl" + metadata_path = base_dir / f"{session_name}_meta.json" + return SessionPaths(pickle_path=pickle_path, metadata_path=metadata_path) + + +def save_session( + *, + history: SessionHistory, + session_name: str, + base_dir: Path, + timestamp: str, + token_estimator: TokenEstimator, + auto_saved: bool = False, +) -> SessionMetadata: + ensure_directory(base_dir) + paths = build_session_paths(base_dir, session_name) + + with paths.pickle_path.open("wb") as pickle_file: + pickle.dump(history, pickle_file) + + total_tokens = sum(token_estimator(message) for message in history) + metadata = SessionMetadata( + session_name=session_name, + timestamp=timestamp, + message_count=len(history), + total_tokens=total_tokens, + pickle_path=paths.pickle_path, + metadata_path=paths.metadata_path, + auto_saved=auto_saved, + ) + + with paths.metadata_path.open("w", encoding="utf-8") as metadata_file: + json.dump(metadata.as_serialisable(), metadata_file, indent=2) + + return metadata + + +def load_session(session_name: str, base_dir: Path) -> SessionHistory: + paths = build_session_paths(base_dir, session_name) + if not paths.pickle_path.exists(): + raise FileNotFoundError(paths.pickle_path) + with paths.pickle_path.open("rb") as pickle_file: + return pickle.load(pickle_file) + + +def list_sessions(base_dir: Path) -> List[str]: + if not base_dir.exists(): + return [] + return sorted(path.stem for path in base_dir.glob("*.pkl")) + + +def cleanup_sessions(base_dir: Path, max_sessions: int) -> List[str]: + if max_sessions <= 0: + return [] + + if not base_dir.exists(): + return [] + + candidate_paths = list(base_dir.glob("*.pkl")) + if len(candidate_paths) <= max_sessions: + return [] + + sorted_candidates = sorted( + ((path.stat().st_mtime, path) for path in candidate_paths), + key=lambda item: item[0], + ) + + stale_entries = sorted_candidates[:-max_sessions] + removed_sessions: List[str] = [] + for _, pickle_path in stale_entries: + metadata_path = base_dir / f"{pickle_path.stem}_meta.json" + try: + 
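+                # Delete the session pickle and its metadata sidecar together; entries that cannot be removed are skipped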
pickle_path.unlink(missing_ok=True) + metadata_path.unlink(missing_ok=True) + removed_sessions.append(pickle_path.stem) + except OSError: + continue + + return removed_sessions + + +async def restore_autosave_interactively(base_dir: Path) -> None: + """Prompt the user to load an autosave session from base_dir, if any exist. + + This helper is deliberately placed in session_storage to keep autosave + restoration close to the persistence layer. It uses the same public APIs + (list_sessions, load_session) and mirrors the interactive behaviours from + the command handler. + """ + sessions = list_sessions(base_dir) + if not sessions: + return + + # Import locally to avoid pulling the messaging layer into storage modules + from datetime import datetime + from prompt_toolkit.formatted_text import FormattedText + + from code_puppy.agents.agent_manager import get_current_agent + from code_puppy.command_line.prompt_toolkit_completion import ( + get_input_with_combined_completion, + ) + from code_puppy.messaging import emit_success, emit_system_message, emit_warning + + entries = [] + for name in sessions: + meta_path = base_dir / f"{name}_meta.json" + try: + with meta_path.open("r", encoding="utf-8") as meta_file: + data = json.load(meta_file) + timestamp = data.get("timestamp") + message_count = data.get("message_count") + except Exception: + timestamp = None + message_count = None + entries.append((name, timestamp, message_count)) + + def sort_key(entry): + _, timestamp, _ = entry + if timestamp: + try: + return datetime.fromisoformat(timestamp) + except ValueError: + return datetime.min + return datetime.min + + entries.sort(key=sort_key, reverse=True) + + PAGE_SIZE = 5 + total = len(entries) + page = 0 + + def render_page() -> None: + start = page * PAGE_SIZE + end = min(start + PAGE_SIZE, total) + page_entries = entries[start:end] + emit_system_message("[bold magenta]Autosave Sessions Available:[/bold magenta]") + for idx, (name, timestamp, message_count) in enumerate(page_entries, start=1): + timestamp_display = timestamp or "unknown time" + message_display = ( + f"{message_count} messages" if message_count is not None else "unknown size" + ) + emit_system_message( + f" [{idx}] {name} ({message_display}, saved at {timestamp_display})" + ) + # If there are more pages, offer next-page; show 'Return to first page' on last page + if total > PAGE_SIZE: + page_count = (total + PAGE_SIZE - 1) // PAGE_SIZE + is_last_page = (page + 1) >= page_count + remaining = total - (page * PAGE_SIZE + len(page_entries)) + summary = f" and {remaining} more" if (remaining > 0 and not is_last_page) else "" + label = "Return to first page" if is_last_page else f"Next page{summary}" + emit_system_message(f" [6] {label}") + emit_system_message(" [Enter] Skip loading autosave") + + chosen_name: str | None = None + + while True: + render_page() + try: + selection = await get_input_with_combined_completion( + FormattedText( + [ + ( + "class:prompt", + "Pick 1-5 to load, 6 for next, or name/Enter: ", + ) + ] + ) + ) + except (KeyboardInterrupt, EOFError): + emit_warning("Autosave selection cancelled") + return + + selection = (selection or "").strip() + if not selection: + return + + # Numeric choice: 1-5 select within current page; 6 advances page + if selection.isdigit(): + num = int(selection) + if num == 6 and total > PAGE_SIZE: + page = (page + 1) % ((total + PAGE_SIZE - 1) // PAGE_SIZE) + # loop and re-render next page + continue + if 1 <= num <= 5: + start = page * PAGE_SIZE + idx = start + (num - 1) + if 0 <= idx < 
total: + chosen_name = entries[idx][0] + break + else: + emit_warning("Invalid selection for this page") + continue + emit_warning("Invalid selection; choose 1-5 or 6 for next") + continue + + # Allow direct typing by exact session name + for name, _ts, _mc in entries: + if name == selection: + chosen_name = name + break + if chosen_name: + break + emit_warning("No autosave loaded (invalid selection)") + # keep looping and allow another try + + if not chosen_name: + return + + try: + history = load_session(chosen_name, base_dir) + except FileNotFoundError: + emit_warning(f"Autosave '{chosen_name}' could not be found") + return + except Exception as exc: + emit_warning(f"Failed to load autosave '{chosen_name}': {exc}") + return + + agent = get_current_agent() + agent.set_message_history(history) + + # Set current autosave session id so subsequent autosaves overwrite this session + try: + from code_puppy.config import set_current_autosave_from_session_name + + set_current_autosave_from_session_name(chosen_name) + except Exception: + pass + + total_tokens = sum(agent.estimate_tokens_for_message(msg) for msg in history) + + session_path = base_dir / f"{chosen_name}.pkl" + emit_success( + f"✅ Autosave loaded: {len(history)} messages ({total_tokens} tokens)\n" + f"📁 From: {session_path}" + ) diff --git a/code_puppy/status_display.py b/code_puppy/status_display.py new file mode 100644 index 00000000..8782c9a2 --- /dev/null +++ b/code_puppy/status_display.py @@ -0,0 +1,234 @@ +import asyncio +import time + +from rich.console import Console +from rich.live import Live +from rich.panel import Panel +from rich.spinner import Spinner +from rich.text import Text + +# Global variable to track current token per second rate +CURRENT_TOKEN_RATE = 0.0 + + +class StatusDisplay: + """ + Displays real-time status information during model execution, + including token per second rate and rotating loading messages. 
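+
+    Illustrative usage (a minimal sketch; assumes an asyncio event loop is
+    already running, since start() schedules the display with
+    asyncio.create_task):
+
+        display = StatusDisplay(Console())
+        display.start()
+        display.update_token_count(128)
+        display.stop()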
+ """ + + def __init__(self, console: Console): + self.console = console + self.token_count = 0 + self.start_time = None + self.last_update_time = None + self.last_token_count = 0 + self.current_rate = 0 + self.is_active = False + self.task = None + self.live = None + self.loading_messages = [ + "Fetching...", + "Sniffing around...", + "Wagging tail...", + "Pawsing for a moment...", + "Chasing tail...", + "Digging up results...", + "Barking at the data...", + "Rolling over...", + "Panting with excitement...", + "Chewing on it...", + "Prancing along...", + "Howling at the code...", + "Snuggling up to the task...", + "Bounding through data...", + "Puppy pondering...", + ] + self.current_message_index = 0 + self.spinner = Spinner("dots", text="") + + def _calculate_rate(self) -> float: + """Calculate the current token rate""" + current_time = time.time() + if self.last_update_time: + time_diff = current_time - self.last_update_time + token_diff = self.token_count - self.last_token_count + if time_diff > 0: + rate = token_diff / time_diff + # Smooth the rate calculation with the current rate + if self.current_rate > 0: + self.current_rate = (self.current_rate * 0.7) + (rate * 0.3) + else: + self.current_rate = rate + + # Only ensure rate is not negative + self.current_rate = max(0, self.current_rate) + + # Update the global rate for other components to access + global CURRENT_TOKEN_RATE + CURRENT_TOKEN_RATE = self.current_rate + + self.last_update_time = current_time + self.last_token_count = self.token_count + return self.current_rate + + def update_rate_from_sse( + self, completion_tokens: int, completion_time: float + ) -> None: + """Update the token rate directly using SSE time_info data + + Args: + completion_tokens: Number of tokens in the completion (from SSE stream) + completion_time: Time taken for completion in seconds (from SSE stream) + """ + if completion_time > 0: + # Using the direct t/s formula: tokens / time + rate = completion_tokens / completion_time + + # Use a lighter smoothing for this more accurate data + if self.current_rate > 0: + self.current_rate = (self.current_rate * 0.3) + ( + rate * 0.7 + ) # Weight SSE data more heavily + else: + self.current_rate = rate + + # Update the global rate + global CURRENT_TOKEN_RATE + CURRENT_TOKEN_RATE = self.current_rate + + @staticmethod + def get_current_rate() -> float: + """Get the current token rate for use in other components""" + global CURRENT_TOKEN_RATE + return CURRENT_TOKEN_RATE + + def update_token_count(self, tokens: int) -> None: + """Update the token count and recalculate the rate""" + # Reset timing if this is the first update of a new task + if self.start_time is None: + self.start_time = time.time() + self.last_update_time = self.start_time + # Reset token counters for new task + self.last_token_count = 0 + self.current_rate = 0.0 + + # Allow for incremental updates (common for streaming) or absolute updates + if tokens > self.token_count or tokens < 0: + # Incremental update or reset + self.token_count = tokens if tokens >= 0 else 0 + else: + # If tokens <= current count but > 0, treat as incremental + # This handles simulated token streaming + self.token_count += tokens + + self._calculate_rate() + + def _get_status_panel(self) -> Panel: + """Generate a status panel with current rate and animated message""" + rate_text = ( + f"{self.current_rate:.1f} t/s" if self.current_rate > 0 else "Warming up..." 
+ ) + + # Update spinner + self.spinner.update() + + # Rotate through loading messages every few updates + if int(time.time() * 2) % 4 == 0: + self.current_message_index = (self.current_message_index + 1) % len( + self.loading_messages + ) + + # Create a highly visible status message + status_text = Text.assemble( + Text(f"⏳ {rate_text} ", style="bold cyan"), + self.spinner, + Text( + f" {self.loading_messages[self.current_message_index]} ⏳", + style="bold yellow", + ), + ) + + # Use expanded panel with more visible formatting + return Panel( + status_text, + title="[bold blue]Code Puppy Status[/bold blue]", + border_style="bright_blue", + expand=False, + padding=(1, 2), + ) + + def _get_status_text(self) -> Text: + """Generate a status text with current rate and animated message""" + rate_text = ( + f"{self.current_rate:.1f} t/s" if self.current_rate > 0 else "Warming up..." + ) + + # Update spinner + self.spinner.update() + + # Rotate through loading messages + self.current_message_index = (self.current_message_index + 1) % len( + self.loading_messages + ) + message = self.loading_messages[self.current_message_index] + + # Create a highly visible status text + return Text.assemble( + Text(f"⏳ {rate_text} 🐾", style="bold cyan"), + Text(f" {message}", style="yellow"), + ) + + async def _update_display(self) -> None: + """Update the display continuously while active using Rich Live display""" + # Add a newline to ensure we're below the blue bar + self.console.print("\n") + + # Create a Live display that will update in-place + with Live( + self._get_status_text(), + console=self.console, + refresh_per_second=2, # Update twice per second + transient=False, # Keep the final state visible + ) as live: + # Keep updating the live display while active + while self.is_active: + live.update(self._get_status_text()) + await asyncio.sleep(0.5) + + def start(self) -> None: + """Start the status display""" + if not self.is_active: + self.is_active = True + self.start_time = time.time() + self.last_update_time = self.start_time + self.token_count = 0 + self.last_token_count = 0 + self.current_rate = 0 + self.task = asyncio.create_task(self._update_display()) + + def stop(self) -> None: + """Stop the status display""" + if self.is_active: + self.is_active = False + if self.task: + self.task.cancel() + self.task = None + + # Print final stats + elapsed = time.time() - self.start_time if self.start_time else 0 + avg_rate = self.token_count / elapsed if elapsed > 0 else 0 + self.console.print( + f"[dim]Completed: {self.token_count} tokens in {elapsed:.1f}s ({avg_rate:.1f} t/s avg)[/dim]" + ) + + # Reset state + self.start_time = None + self.token_count = 0 + self.last_update_time = None + self.last_token_count = 0 + self.current_rate = 0 + + # Reset global rate to 0 to avoid affecting subsequent tasks + global CURRENT_TOKEN_RATE + CURRENT_TOKEN_RATE = 0.0 diff --git a/code_puppy/summarization_agent.py b/code_puppy/summarization_agent.py new file mode 100644 index 00000000..a03e01bc --- /dev/null +++ b/code_puppy/summarization_agent.py @@ -0,0 +1,89 @@ +import asyncio +from concurrent.futures import ThreadPoolExecutor +from typing import List + +from pydantic_ai import Agent + +from code_puppy.config import get_global_model_name +from code_puppy.model_factory import ModelFactory + +# Keep a module-level agent reference to avoid rebuilding per call +_summarization_agent = None + +# Safe sync runner for async agent.run calls +# Avoids "event loop is already running" by offloading to a separate thread loop when 
needed +_thread_pool: ThreadPoolExecutor | None = None + + +def _ensure_thread_pool(): + global _thread_pool + if _thread_pool is None: + _thread_pool = ThreadPoolExecutor( + max_workers=1, thread_name_prefix="summarizer-loop" + ) + return _thread_pool + + +async def _run_agent_async(agent: Agent, prompt: str, message_history: List): + return await agent.run(prompt, message_history=message_history) + + +def run_summarization_sync(prompt: str, message_history: List) -> List: + agent = get_summarization_agent() + try: + # Try to detect if we're already in an event loop + asyncio.get_running_loop() + + # We're in an event loop: offload to a dedicated thread with its own loop + def _worker(prompt_: str): + return asyncio.run( + _run_agent_async(agent, prompt_, message_history=message_history) + ) + + pool = _ensure_thread_pool() + result = pool.submit(_worker, prompt).result() + except RuntimeError: + # No running loop, safe to run directly + result = asyncio.run( + _run_agent_async(agent, prompt, message_history=message_history) + ) + return result.new_messages() + + +def reload_summarization_agent(): + """Create a specialized agent for summarizing messages when context limit is reached.""" + models_config = ModelFactory.load_config() + model_name = get_global_model_name() + model = ModelFactory.get_model(model_name, models_config) + + # Specialized instructions for summarization + instructions = """You are a message summarization expert. Your task is to summarize conversation messages +while preserving important context and information. The summaries should be concise but capture the essential +content and intent of the original messages. This is to help manage token usage in a conversation history +while maintaining context for the AI to continue the conversation effectively. + +When summarizing: +1. Keep summary brief but informative +2. Preserve key information and decisions +3. Keep any important technical details +4. Don't summarize the system message +5. Make sure all tool calls and responses are summarized, as they are vital""" + + agent = Agent( + model=model, + instructions=instructions, + output_type=str, + retries=1, # Fewer retries for summarization + ) + return agent + + +def get_summarization_agent(force_reload=True): + """ + Retrieve the summarization agent with the currently set MODEL_NAME. + Forces a reload if the model has changed, or if force_reload is passed. 
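+    Note: with the default force_reload=True the agent is rebuilt (and the
+    configured model re-resolved) on every call; pass force_reload=False to
+    reuse the cached module-level instance.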
+ """ + global _summarization_agent + if force_reload or _summarization_agent is None: + _summarization_agent = reload_summarization_agent() + return _summarization_agent diff --git a/code_puppy/tests/test_prompt_toolkit_completion.py b/code_puppy/tests/test_prompt_toolkit_completion.py deleted file mode 100644 index ad01c376..00000000 --- a/code_puppy/tests/test_prompt_toolkit_completion.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest -from prompt_toolkit.document import Document -from code_puppy.command_line.prompt_toolkit_completion import FilePathCompleter - - -class TestFilePathCompleter(unittest.TestCase): - def setUp(self): - self.completer = FilePathCompleter("@") - - def test_no_symbol_in_text(self): - document = Document(text="No symbol here", cursor_position=14) - completions = list(self.completer.get_completions(document, None)) - self.assertEqual(completions, []) - - def test_symbol_with_partial_path(self): - document = Document( - text="Look at this: @code_puppy/com", - cursor_position=len("Look at this: @code_puppy/com"), - ) - completions = list(self.completer.get_completions(document, None)) - expected_completions = [c.text for c in completions] - self.assertTrue( - any( - path.startswith("code_puppy/command_line") - for path in expected_completions - ) - ) - - def test_hidden_files_completion(self): - document = Document( - text="@.", cursor_position=2 - ) # Assuming this is the home or current folder - completions = list(self.completer.get_completions(document, None)) - hidden_files = [c.text for c in completions if c.text.startswith(".")] - self.assertGreater(len(hidden_files), 0) - - -if __name__ == "__main__": - unittest.main() diff --git a/code_puppy/tools/__init__.py b/code_puppy/tools/__init__.py index 6baf85c7..d4d64c7e 100644 --- a/code_puppy/tools/__init__.py +++ b/code_puppy/tools/__init__.py @@ -1,4 +1,167 @@ -import code_puppy.tools.file_modifications -import code_puppy.tools.file_operations -import code_puppy.tools.command_runner -import code_puppy.tools.web_search +from code_puppy.messaging import emit_warning +from code_puppy.tools.agent_tools import register_invoke_agent, register_list_agents + +# Browser automation tools +from code_puppy.tools.browser.browser_control import ( + register_close_browser, + register_create_new_page, + register_get_browser_status, + register_initialize_browser, + register_list_pages, +) +from code_puppy.tools.browser.browser_interactions import ( + register_browser_check, + register_browser_uncheck, + register_click_element, + register_double_click_element, + register_get_element_text, + register_get_element_value, + register_hover_element, + register_select_option, + register_set_element_text, +) +from code_puppy.tools.browser.browser_locators import ( + register_find_buttons, + register_find_by_label, + register_find_by_placeholder, + register_find_by_role, + register_find_by_test_id, + register_find_by_text, + register_find_links, + register_run_xpath_query, +) +from code_puppy.tools.browser.browser_navigation import ( + register_browser_go_back, + register_browser_go_forward, + register_get_page_info, + register_navigate_to_url, + register_reload_page, + register_wait_for_load_state, +) +from code_puppy.tools.browser.browser_screenshot import ( + register_take_screenshot_and_analyze, +) +from code_puppy.tools.browser.browser_scripts import ( + register_browser_clear_highlights, + register_browser_highlight_element, + register_execute_javascript, + register_scroll_page, + register_scroll_to_element, + 
register_set_viewport_size, + register_wait_for_element, +) +from code_puppy.tools.browser.browser_workflows import ( + register_list_workflows, + register_read_workflow, + register_save_workflow, +) +from code_puppy.tools.command_runner import ( + register_agent_run_shell_command, + register_agent_share_your_reasoning, +) +from code_puppy.tools.file_modifications import register_delete_file, register_edit_file +from code_puppy.tools.file_operations import ( + register_grep, + register_list_files, + register_read_file, +) + +# Map of tool names to their individual registration functions +TOOL_REGISTRY = { + # Agent Tools + "list_agents": register_list_agents, + "invoke_agent": register_invoke_agent, + # File Operations + "list_files": register_list_files, + "read_file": register_read_file, + "grep": register_grep, + # File Modifications + "edit_file": register_edit_file, + "delete_file": register_delete_file, + # Command Runner + "agent_run_shell_command": register_agent_run_shell_command, + "agent_share_your_reasoning": register_agent_share_your_reasoning, + # Browser Control + "browser_initialize": register_initialize_browser, + "browser_close": register_close_browser, + "browser_status": register_get_browser_status, + "browser_new_page": register_create_new_page, + "browser_list_pages": register_list_pages, + # Browser Navigation + "browser_navigate": register_navigate_to_url, + "browser_get_page_info": register_get_page_info, + "browser_go_back": register_browser_go_back, + "browser_go_forward": register_browser_go_forward, + "browser_reload": register_reload_page, + "browser_wait_for_load": register_wait_for_load_state, + # Browser Element Discovery + "browser_find_by_role": register_find_by_role, + "browser_find_by_text": register_find_by_text, + "browser_find_by_label": register_find_by_label, + "browser_find_by_placeholder": register_find_by_placeholder, + "browser_find_by_test_id": register_find_by_test_id, + "browser_xpath_query": register_run_xpath_query, + "browser_find_buttons": register_find_buttons, + "browser_find_links": register_find_links, + # Browser Element Interactions + "browser_click": register_click_element, + "browser_double_click": register_double_click_element, + "browser_hover": register_hover_element, + "browser_set_text": register_set_element_text, + "browser_get_text": register_get_element_text, + "browser_get_value": register_get_element_value, + "browser_select_option": register_select_option, + "browser_check": register_browser_check, + "browser_uncheck": register_browser_uncheck, + # Browser Scripts and Advanced Features + "browser_execute_js": register_execute_javascript, + "browser_scroll": register_scroll_page, + "browser_scroll_to_element": register_scroll_to_element, + "browser_set_viewport": register_set_viewport_size, + "browser_wait_for_element": register_wait_for_element, + "browser_highlight_element": register_browser_highlight_element, + "browser_clear_highlights": register_browser_clear_highlights, + # Browser Screenshots and VQA + "browser_screenshot_analyze": register_take_screenshot_and_analyze, + # Browser Workflows + "browser_save_workflow": register_save_workflow, + "browser_list_workflows": register_list_workflows, + "browser_read_workflow": register_read_workflow, +} + + +def register_tools_for_agent(agent, tool_names: list[str]): + """Register specific tools for an agent based on tool names. + + Args: + agent: The agent to register tools to. + tool_names: List of tool names to register. 
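+
+    Example (illustrative; tool names must be keys of TOOL_REGISTRY, otherwise
+    they are skipped with a warning):
+        register_tools_for_agent(agent, ["list_files", "read_file", "grep"])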
+ """ + for tool_name in tool_names: + if tool_name not in TOOL_REGISTRY: + # Skip unknown tools with a warning instead of failing + emit_warning(f"Warning: Unknown tool '{tool_name}' requested, skipping...") + continue + + # Register the individual tool + register_func = TOOL_REGISTRY[tool_name] + register_func(agent) + + +def register_all_tools(agent): + """Register all available tools to the provided agent. + + Args: + agent: The agent to register tools to. + """ + all_tools = list(TOOL_REGISTRY.keys()) + register_tools_for_agent(agent, all_tools) + + +def get_available_tool_names() -> list[str]: + """Get list of all available tool names. + + Returns: + List of all tool names that can be registered. + """ + return list(TOOL_REGISTRY.keys()) diff --git a/code_puppy/tools/agent_tools.py b/code_puppy/tools/agent_tools.py new file mode 100644 index 00000000..c519d379 --- /dev/null +++ b/code_puppy/tools/agent_tools.py @@ -0,0 +1,178 @@ +# agent_tools.py + +from typing import List + +from pydantic import BaseModel + +# Import Agent from pydantic_ai to create temporary agents for invocation +from pydantic_ai import Agent, RunContext + +from code_puppy.config import get_global_model_name +from code_puppy.messaging import ( + emit_divider, + emit_error, + emit_info, + emit_system_message, +) +from code_puppy.model_factory import ModelFactory +from code_puppy.tools.common import generate_group_id + + +class AgentInfo(BaseModel): + """Information about an available agent.""" + + name: str + display_name: str + + +class ListAgentsOutput(BaseModel): + """Output for the list_agents tool.""" + + agents: List[AgentInfo] + error: str | None = None + + +class AgentInvokeOutput(BaseModel): + """Output for the invoke_agent tool.""" + + response: str | None + agent_name: str + error: str | None = None + + +def register_list_agents(agent): + """Register the list_agents tool with the provided agent. + + Args: + agent: The agent to register the tool with + """ + + @agent.tool + def list_agents(context: RunContext) -> ListAgentsOutput: + """List all available sub-agents that can be invoked. + + Returns: + ListAgentsOutput: A list of available agents with their names and display names. + """ + # Generate a group ID for this tool execution + group_id = generate_group_id("list_agents") + + emit_info( + "\n[bold white on blue] LIST AGENTS [/bold white on blue]", + message_group=group_id, + ) + emit_divider(message_group=group_id) + + try: + from code_puppy.agents import get_available_agents + + # Get available agents from the agent manager + agents_dict = get_available_agents() + + # Convert to list of AgentInfo objects + agents = [ + AgentInfo(name=name, display_name=display_name) + for name, display_name in agents_dict.items() + ] + + # Display the agents in the console + for agent_item in agents: + emit_system_message( + f"- [bold]{agent_item.name}[/bold]: {agent_item.display_name}", + message_group=group_id, + ) + + emit_divider(message_group=group_id) + return ListAgentsOutput(agents=agents) + + except Exception as e: + error_msg = f"Error listing agents: {str(e)}" + emit_error(error_msg, message_group=group_id) + emit_divider(message_group=group_id) + return ListAgentsOutput(agents=[], error=error_msg) + + return list_agents + + +def register_invoke_agent(agent): + """Register the invoke_agent tool with the provided agent. 
+ + Args: + agent: The agent to register the tool with + """ + + @agent.tool + def invoke_agent( + context: RunContext, agent_name: str, prompt: str + ) -> AgentInvokeOutput: + """Invoke a specific sub-agent with a given prompt. + + Args: + agent_name: The name of the agent to invoke + prompt: The prompt to send to the agent + + Returns: + AgentInvokeOutput: The agent's response to the prompt + """ + from code_puppy.agents.agent_manager import load_agent + + # Generate a group ID for this tool execution + group_id = generate_group_id("invoke_agent", agent_name) + + emit_info( + f"\n[bold white on blue] INVOKE AGENT [/bold white on blue] {agent_name}", + message_group=group_id, + ) + emit_divider(message_group=group_id) + emit_system_message(f"Prompt: {prompt}", message_group=group_id) + emit_divider(message_group=group_id) + + try: + # Load the specified agent config + agent_config = load_agent(agent_name) + + # Get the current model for creating a temporary agent + model_name = get_global_model_name() + models_config = ModelFactory.load_config() + + # Only proceed if we have a valid model configuration + if model_name not in models_config: + raise ValueError(f"Model '{model_name}' not found in configuration") + + model = ModelFactory.get_model(model_name, models_config) + + # Create a temporary agent instance to avoid interfering with current agent state + instructions = agent_config.get_system_prompt() + temp_agent = Agent( + model=model, + instructions=instructions, + output_type=str, + retries=3, + ) + + # Register the tools that the agent needs + from code_puppy.tools import register_tools_for_agent + + agent_tools = agent_config.get_available_tools() + register_tools_for_agent(temp_agent, agent_tools) + + # Run the temporary agent with the provided prompt + result = temp_agent.run_sync(prompt) + + # Extract the response from the result + response = result.output + + emit_system_message(f"Response: {response}", message_group=group_id) + emit_divider(message_group=group_id) + + return AgentInvokeOutput(response=response, agent_name=agent_name) + + except Exception as e: + error_msg = f"Error invoking agent '{agent_name}': {str(e)}" + emit_error(error_msg, message_group=group_id) + emit_divider(message_group=group_id) + return AgentInvokeOutput( + response=None, agent_name=agent_name, error=error_msg + ) + + return invoke_agent diff --git a/code_puppy/tools/browser/__init__.py b/code_puppy/tools/browser/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/code_puppy/tools/browser/browser_control.py b/code_puppy/tools/browser/browser_control.py new file mode 100644 index 00000000..858366c7 --- /dev/null +++ b/code_puppy/tools/browser/browser_control.py @@ -0,0 +1,293 @@ +"""Browser initialization and control tools.""" + +from typing import Any, Dict, Optional + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager + + +async def initialize_browser( + headless: bool = False, + browser_type: str = "chromium", + homepage: str = "https://www.google.com", +) -> Dict[str, Any]: + """Initialize the browser with specified settings.""" + group_id = generate_group_id("browser_initialize", f"{browser_type}_{homepage}") + emit_info( + f"[bold white on blue] BROWSER INITIALIZE [/bold white on blue] 🌐 {browser_type} → {homepage}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + + # Configure browser settings + 
browser_manager.headless = headless + browser_manager.browser_type = browser_type + browser_manager.homepage = homepage + + # Initialize browser + await browser_manager.async_initialize() + + # Get page info + page = await browser_manager.get_current_page() + if page: + url = page.url + title = await page.title() + else: + url = "Unknown" + title = "Unknown" + + emit_info( + "[green]Browser initialized successfully[/green]", message_group=group_id + ) + + return { + "success": True, + "browser_type": browser_type, + "headless": headless, + "homepage": homepage, + "current_url": url, + "current_title": title, + } + + except Exception as e: + emit_info( + f"[red]Browser initialization failed: {str(e)}[/red]", + message_group=group_id, + ) + return { + "success": False, + "error": str(e), + "browser_type": browser_type, + "headless": headless, + } + + +async def close_browser() -> Dict[str, Any]: + """Close the browser and clean up resources.""" + group_id = generate_group_id("browser_close") + emit_info( + "[bold white on blue] BROWSER CLOSE [/bold white on blue] 🔒", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + await browser_manager.close() + + emit_info( + "[yellow]Browser closed successfully[/yellow]", message_group=group_id + ) + + return {"success": True, "message": "Browser closed"} + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def get_browser_status() -> Dict[str, Any]: + """Get current browser status and information.""" + group_id = generate_group_id("browser_status") + emit_info( + "[bold white on blue] BROWSER STATUS [/bold white on blue] 📊", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + + if not browser_manager._initialized: + return { + "success": True, + "status": "not_initialized", + "browser_type": browser_manager.browser_type, + "headless": browser_manager.headless, + } + + page = await browser_manager.get_current_page() + if page: + url = page.url + title = await page.title() + + # Get all pages + all_pages = await browser_manager.get_all_pages() + page_count = len(all_pages) + else: + url = None + title = None + page_count = 0 + + return { + "success": True, + "status": "initialized", + "browser_type": browser_manager.browser_type, + "headless": browser_manager.headless, + "current_url": url, + "current_title": title, + "page_count": page_count, + } + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def create_new_page(url: Optional[str] = None) -> Dict[str, Any]: + """Create a new browser page/tab.""" + group_id = generate_group_id("browser_new_page", url or "blank") + emit_info( + f"[bold white on blue] BROWSER NEW PAGE [/bold white on blue] 📄 {url or 'blank page'}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + + if not browser_manager._initialized: + return { + "success": False, + "error": "Browser not initialized. 
Use browser_initialize first.", + } + + page = await browser_manager.new_page(url) + + final_url = page.url + title = await page.title() + + emit_info( + f"[green]Created new page: {final_url}[/green]", message_group=group_id + ) + + return {"success": True, "url": final_url, "title": title, "requested_url": url} + + except Exception as e: + return {"success": False, "error": str(e), "url": url} + + +async def list_pages() -> Dict[str, Any]: + """List all open browser pages/tabs.""" + group_id = generate_group_id("browser_list_pages") + emit_info( + "[bold white on blue] BROWSER LIST PAGES [/bold white on blue] 📋", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + + if not browser_manager._initialized: + return {"success": False, "error": "Browser not initialized"} + + all_pages = await browser_manager.get_all_pages() + + pages_info = [] + for i, page in enumerate(all_pages): + try: + url = page.url + title = await page.title() + is_closed = page.is_closed() + + pages_info.append( + {"index": i, "url": url, "title": title, "closed": is_closed} + ) + except Exception as e: + pages_info.append( + { + "index": i, + "url": "Error", + "title": "Error", + "error": str(e), + "closed": True, + } + ) + + return {"success": True, "page_count": len(all_pages), "pages": pages_info} + + except Exception as e: + return {"success": False, "error": str(e)} + + +# Tool registration functions +def register_initialize_browser(agent): + """Register the browser initialization tool.""" + + @agent.tool + async def browser_initialize( + context: RunContext, + headless: bool = False, + browser_type: str = "chromium", + homepage: str = "https://www.google.com", + ) -> Dict[str, Any]: + """ + Initialize the browser with specified settings. Must be called before using other browser tools. + + Args: + headless: Run browser in headless mode (no GUI) + browser_type: Browser engine (chromium, firefox, webkit) + homepage: Initial page to load + + Returns: + Dict with initialization results + """ + return await initialize_browser(headless, browser_type, homepage) + + +def register_close_browser(agent): + """Register the browser close tool.""" + + @agent.tool + async def browser_close(context: RunContext) -> Dict[str, Any]: + """ + Close the browser and clean up all resources. + + Returns: + Dict with close results + """ + return await close_browser() + + +def register_get_browser_status(agent): + """Register the browser status tool.""" + + @agent.tool + async def browser_status(context: RunContext) -> Dict[str, Any]: + """ + Get current browser status and information. + + Returns: + Dict with browser status and metadata + """ + return await get_browser_status() + + +def register_create_new_page(agent): + """Register the new page creation tool.""" + + @agent.tool + async def browser_new_page( + context: RunContext, + url: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Create a new browser page/tab. + + Args: + url: Optional URL to navigate to in the new page + + Returns: + Dict with new page results + """ + return await create_new_page(url) + + +def register_list_pages(agent): + """Register the list pages tool.""" + + @agent.tool + async def browser_list_pages(context: RunContext) -> Dict[str, Any]: + """ + List all open browser pages/tabs. 
+ + Returns: + Dict with information about all open pages + """ + return await list_pages() diff --git a/code_puppy/tools/browser/browser_interactions.py b/code_puppy/tools/browser/browser_interactions.py new file mode 100644 index 00000000..fffbee45 --- /dev/null +++ b/code_puppy/tools/browser/browser_interactions.py @@ -0,0 +1,552 @@ +"""Browser element interaction tools for clicking, typing, and form manipulation.""" + +from typing import Any, Dict, List, Optional + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager + + +async def click_element( + selector: str, + timeout: int = 10000, + force: bool = False, + button: str = "left", + modifiers: Optional[List[str]] = None, +) -> Dict[str, Any]: + """Click on an element.""" + group_id = generate_group_id("browser_click", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER CLICK [/bold white on blue] 🖱️ selector='{selector}' button={button}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Find element + element = page.locator(selector) + + # Wait for element to be visible and enabled + await element.wait_for(state="visible", timeout=timeout) + + # Click options + click_options = { + "force": force, + "button": button, + "timeout": timeout, + } + + if modifiers: + click_options["modifiers"] = modifiers + + await element.click(**click_options) + + emit_info(f"[green]Clicked element: {selector}[/green]", message_group=group_id) + + return {"success": True, "selector": selector, "action": f"{button}_click"} + + except Exception as e: + emit_info(f"[red]Click failed: {str(e)}[/red]", message_group=group_id) + return {"success": False, "error": str(e), "selector": selector} + + +async def double_click_element( + selector: str, + timeout: int = 10000, + force: bool = False, +) -> Dict[str, Any]: + """Double-click on an element.""" + group_id = generate_group_id("browser_double_click", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER DOUBLE CLICK [/bold white on blue] 🖱️🖱️ selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + await element.dblclick(force=force, timeout=timeout) + + emit_info( + f"[green]Double-clicked element: {selector}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "action": "double_click"} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def hover_element( + selector: str, + timeout: int = 10000, + force: bool = False, +) -> Dict[str, Any]: + """Hover over an element.""" + group_id = generate_group_id("browser_hover", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER HOVER [/bold white on blue] 👆 selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await 
element.wait_for(state="visible", timeout=timeout) + await element.hover(force=force, timeout=timeout) + + emit_info( + f"[green]Hovered over element: {selector}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "action": "hover"} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def set_element_text( + selector: str, + text: str, + clear_first: bool = True, + timeout: int = 10000, +) -> Dict[str, Any]: + """Set text in an input element.""" + group_id = generate_group_id("browser_set_text", f"{selector[:50]}_{text[:30]}") + emit_info( + f"[bold white on blue] BROWSER SET TEXT [/bold white on blue] ✏️ selector='{selector}' text='{text[:50]}{'...' if len(text) > 50 else ''}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + + if clear_first: + await element.clear(timeout=timeout) + + await element.fill(text, timeout=timeout) + + emit_info( + f"[green]Set text in element: {selector}[/green]", message_group=group_id + ) + + return { + "success": True, + "selector": selector, + "text": text, + "action": "set_text", + } + + except Exception as e: + emit_info(f"[red]Set text failed: {str(e)}[/red]", message_group=group_id) + return {"success": False, "error": str(e), "selector": selector, "text": text} + + +async def get_element_text( + selector: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Get text content from an element.""" + group_id = generate_group_id("browser_get_text", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER GET TEXT [/bold white on blue] 📝 selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + + text = await element.text_content() + + return {"success": True, "selector": selector, "text": text} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def get_element_value( + selector: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Get value from an input element.""" + group_id = generate_group_id("browser_get_value", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER GET VALUE [/bold white on blue] 📎 selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + + value = await element.input_value() + + return {"success": True, "selector": selector, "value": value} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def select_option( + selector: str, + value: Optional[str] = None, + label: Optional[str] = None, + index: Optional[int] = None, + timeout: int = 10000, +) -> Dict[str, Any]: + """Select an option in a dropdown/select element.""" + option_desc = value or label or str(index) if index is not None else "unknown" + 
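+    # Note: the expression above parses as
+    # (value or label or str(index)) if index is not None else "unknown",
+    # so the logged description falls back to "unknown" whenever index is
+    # None, even if value or label was supplied; the selection logic below
+    # is unaffected.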
group_id = generate_group_id( + "browser_select_option", f"{selector[:50]}_{option_desc}" + ) + emit_info( + f"[bold white on blue] BROWSER SELECT OPTION [/bold white on blue] 📄 selector='{selector}' option='{option_desc}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + + if value is not None: + await element.select_option(value=value, timeout=timeout) + selection = value + elif label is not None: + await element.select_option(label=label, timeout=timeout) + selection = label + elif index is not None: + await element.select_option(index=index, timeout=timeout) + selection = str(index) + else: + return { + "success": False, + "error": "Must specify value, label, or index", + "selector": selector, + } + + emit_info( + f"[green]Selected option in {selector}: {selection}[/green]", + message_group=group_id, + ) + + return {"success": True, "selector": selector, "selection": selection} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def check_element( + selector: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Check a checkbox or radio button.""" + group_id = generate_group_id("browser_check", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER CHECK [/bold white on blue] ☑️ selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + await element.check(timeout=timeout) + + emit_info(f"[green]Checked element: {selector}[/green]", message_group=group_id) + + return {"success": True, "selector": selector, "action": "check"} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def uncheck_element( + selector: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Uncheck a checkbox.""" + group_id = generate_group_id("browser_uncheck", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER UNCHECK [/bold white on blue] ☐️ selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + await element.uncheck(timeout=timeout) + + emit_info( + f"[green]Unchecked element: {selector}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "action": "uncheck"} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +# Tool registration functions +def register_click_element(agent): + """Register the click element tool.""" + + @agent.tool + async def browser_click( + context: RunContext, + selector: str, + timeout: int = 10000, + force: bool = False, + button: str = "left", + modifiers: Optional[List[str]] = None, + ) -> Dict[str, Any]: + """ + Click on an element in the browser. 
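+        The selector is passed to the page's Playwright-style locator(), so
+        CSS selectors work as-is and XPath can be supplied with an "xpath="
+        prefix.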
+ + Args: + selector: CSS or XPath selector for the element + timeout: Timeout in milliseconds to wait for element + force: Skip actionability checks and force the click + button: Mouse button to click (left, right, middle) + modifiers: Modifier keys to hold (Alt, Control, Meta, Shift) + + Returns: + Dict with click results + """ + return await click_element(selector, timeout, force, button, modifiers) + + +def register_double_click_element(agent): + """Register the double-click element tool.""" + + @agent.tool + async def browser_double_click( + context: RunContext, + selector: str, + timeout: int = 10000, + force: bool = False, + ) -> Dict[str, Any]: + """ + Double-click on an element in the browser. + + Args: + selector: CSS or XPath selector for the element + timeout: Timeout in milliseconds to wait for element + force: Skip actionability checks and force the double-click + + Returns: + Dict with double-click results + """ + return await double_click_element(selector, timeout, force) + + +def register_hover_element(agent): + """Register the hover element tool.""" + + @agent.tool + async def browser_hover( + context: RunContext, + selector: str, + timeout: int = 10000, + force: bool = False, + ) -> Dict[str, Any]: + """ + Hover over an element in the browser. + + Args: + selector: CSS or XPath selector for the element + timeout: Timeout in milliseconds to wait for element + force: Skip actionability checks and force the hover + + Returns: + Dict with hover results + """ + return await hover_element(selector, timeout, force) + + +def register_set_element_text(agent): + """Register the set element text tool.""" + + @agent.tool + async def browser_set_text( + context: RunContext, + selector: str, + text: str, + clear_first: bool = True, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Set text in an input element. + + Args: + selector: CSS or XPath selector for the input element + text: Text to enter + clear_first: Whether to clear existing text first + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with text input results + """ + return await set_element_text(selector, text, clear_first, timeout) + + +def register_get_element_text(agent): + """Register the get element text tool.""" + + @agent.tool + async def browser_get_text( + context: RunContext, + selector: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Get text content from an element. + + Args: + selector: CSS or XPath selector for the element + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with element text content + """ + return await get_element_text(selector, timeout) + + +def register_get_element_value(agent): + """Register the get element value tool.""" + + @agent.tool + async def browser_get_value( + context: RunContext, + selector: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Get value from an input element. + + Args: + selector: CSS or XPath selector for the input element + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with element value + """ + return await get_element_value(selector, timeout) + + +def register_select_option(agent): + """Register the select option tool.""" + + @agent.tool + async def browser_select_option( + context: RunContext, + selector: str, + value: Optional[str] = None, + label: Optional[str] = None, + index: Optional[int] = None, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Select an option in a dropdown/select element. 
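+        Provide one of value, label, or index (value takes precedence when
+        several are given); if none is supplied the call returns an error
+        result instead of raising.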
+ + Args: + selector: CSS or XPath selector for the select element + value: Option value to select + label: Option label text to select + index: Option index to select (0-based) + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with selection results + """ + return await select_option(selector, value, label, index, timeout) + + +def register_browser_check(agent): + """Register checkbox/radio button check tool.""" + + @agent.tool + async def browser_check( + context: RunContext, + selector: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Check a checkbox or radio button. + + Args: + selector: CSS or XPath selector for the checkbox/radio + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with check results + """ + return await check_element(selector, timeout) + + +def register_browser_uncheck(agent): + """Register checkbox uncheck tool.""" + + @agent.tool + async def browser_uncheck( + context: RunContext, + selector: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Uncheck a checkbox. + + Args: + selector: CSS or XPath selector for the checkbox + timeout: Timeout in milliseconds to wait for element + + Returns: + Dict with uncheck results + """ + return await uncheck_element(selector, timeout) diff --git a/code_puppy/tools/browser/browser_locators.py b/code_puppy/tools/browser/browser_locators.py new file mode 100644 index 00000000..2f9a5361 --- /dev/null +++ b/code_puppy/tools/browser/browser_locators.py @@ -0,0 +1,642 @@ +"""Browser element discovery tools using semantic locators and XPath.""" + +from typing import Any, Dict, Optional + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager + + +async def find_by_role( + role: str, + name: Optional[str] = None, + exact: bool = False, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find elements by ARIA role.""" + group_id = generate_group_id("browser_find_by_role", f"{role}_{name or 'any'}") + emit_info( + f"[bold white on blue] BROWSER FIND BY ROLE [/bold white on blue] 🎨 role={role} name={name}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Build locator + locator = page.get_by_role(role, name=name, exact=exact) + + # Wait for at least one element + await locator.first.wait_for(state="visible", timeout=timeout) + + # Count elements + count = await locator.count() + + # Get element info + elements = [] + for i in range(min(count, 10)): # Limit to first 10 elements + element = locator.nth(i) + if await element.is_visible(): + text = await element.text_content() + elements.append({"index": i, "text": text, "visible": True}) + + emit_info( + f"[green]Found {count} elements with role '{role}'[/green]", + message_group=group_id, + ) + + return { + "success": True, + "role": role, + "name": name, + "count": count, + "elements": elements, + } + + except Exception as e: + return {"success": False, "error": str(e), "role": role, "name": name} + + +async def find_by_text( + text: str, + exact: bool = False, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find elements containing specific text.""" + group_id = generate_group_id("browser_find_by_text", text[:50]) + emit_info( + f"[bold white on blue] BROWSER FIND BY TEXT [/bold white on blue] 🔍 text='{text}' 
exact={exact}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + locator = page.get_by_text(text, exact=exact) + + # Wait for at least one element + await locator.first.wait_for(state="visible", timeout=timeout) + + count = await locator.count() + + elements = [] + for i in range(min(count, 10)): + element = locator.nth(i) + if await element.is_visible(): + tag_name = await element.evaluate("el => el.tagName.toLowerCase()") + full_text = await element.text_content() + elements.append( + {"index": i, "tag": tag_name, "text": full_text, "visible": True} + ) + + emit_info( + f"[green]Found {count} elements containing text '{text}'[/green]", + message_group=group_id, + ) + + return { + "success": True, + "search_text": text, + "exact": exact, + "count": count, + "elements": elements, + } + + except Exception as e: + return {"success": False, "error": str(e), "search_text": text} + + +async def find_by_label( + text: str, + exact: bool = False, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find form elements by their associated label text.""" + group_id = generate_group_id("browser_find_by_label", text[:50]) + emit_info( + f"[bold white on blue] BROWSER FIND BY LABEL [/bold white on blue] 🏷️ label='{text}' exact={exact}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + locator = page.get_by_label(text, exact=exact) + + await locator.first.wait_for(state="visible", timeout=timeout) + + count = await locator.count() + + elements = [] + for i in range(min(count, 10)): + element = locator.nth(i) + if await element.is_visible(): + tag_name = await element.evaluate("el => el.tagName.toLowerCase()") + input_type = await element.get_attribute("type") + value = ( + await element.input_value() + if tag_name in ["input", "textarea"] + else None + ) + + elements.append( + { + "index": i, + "tag": tag_name, + "type": input_type, + "value": value, + "visible": True, + } + ) + + emit_info( + f"[green]Found {count} elements with label '{text}'[/green]", + message_group=group_id, + ) + + return { + "success": True, + "label_text": text, + "exact": exact, + "count": count, + "elements": elements, + } + + except Exception as e: + return {"success": False, "error": str(e), "label_text": text} + + +async def find_by_placeholder( + text: str, + exact: bool = False, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find elements by placeholder text.""" + group_id = generate_group_id("browser_find_by_placeholder", text[:50]) + emit_info( + f"[bold white on blue] BROWSER FIND BY PLACEHOLDER [/bold white on blue] 📝 placeholder='{text}' exact={exact}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + locator = page.get_by_placeholder(text, exact=exact) + + await locator.first.wait_for(state="visible", timeout=timeout) + + count = await locator.count() + + elements = [] + for i in range(min(count, 10)): + element = locator.nth(i) + if await element.is_visible(): + tag_name = await element.evaluate("el => el.tagName.toLowerCase()") + placeholder = await element.get_attribute("placeholder") + value = 
await element.input_value() + + elements.append( + { + "index": i, + "tag": tag_name, + "placeholder": placeholder, + "value": value, + "visible": True, + } + ) + + emit_info( + f"[green]Found {count} elements with placeholder '{text}'[/green]", + message_group=group_id, + ) + + return { + "success": True, + "placeholder_text": text, + "exact": exact, + "count": count, + "elements": elements, + } + + except Exception as e: + return {"success": False, "error": str(e), "placeholder_text": text} + + +async def find_by_test_id( + test_id: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find elements by test ID attribute.""" + group_id = generate_group_id("browser_find_by_test_id", test_id) + emit_info( + f"[bold white on blue] BROWSER FIND BY TEST ID [/bold white on blue] 🧪 test_id='{test_id}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + locator = page.get_by_test_id(test_id) + + await locator.first.wait_for(state="visible", timeout=timeout) + + count = await locator.count() + + elements = [] + for i in range(min(count, 10)): + element = locator.nth(i) + if await element.is_visible(): + tag_name = await element.evaluate("el => el.tagName.toLowerCase()") + text = await element.text_content() + + elements.append( + { + "index": i, + "tag": tag_name, + "text": text, + "test_id": test_id, + "visible": True, + } + ) + + emit_info( + f"[green]Found {count} elements with test-id '{test_id}'[/green]", + message_group=group_id, + ) + + return { + "success": True, + "test_id": test_id, + "count": count, + "elements": elements, + } + + except Exception as e: + return {"success": False, "error": str(e), "test_id": test_id} + + +async def run_xpath_query( + xpath: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Find elements using XPath selector.""" + group_id = generate_group_id("browser_xpath_query", xpath[:100]) + emit_info( + f"[bold white on blue] BROWSER XPATH QUERY [/bold white on blue] 🔍 xpath='{xpath}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Use page.locator with xpath + locator = page.locator(f"xpath={xpath}") + + # Wait for at least one element + await locator.first.wait_for(state="visible", timeout=timeout) + + count = await locator.count() + + elements = [] + for i in range(min(count, 10)): + element = locator.nth(i) + if await element.is_visible(): + tag_name = await element.evaluate("el => el.tagName.toLowerCase()") + text = await element.text_content() + class_name = await element.get_attribute("class") + element_id = await element.get_attribute("id") + + elements.append( + { + "index": i, + "tag": tag_name, + "text": text[:100] if text else None, # Truncate long text + "class": class_name, + "id": element_id, + "visible": True, + } + ) + + emit_info( + f"[green]Found {count} elements with XPath '{xpath}'[/green]", + message_group=group_id, + ) + + return {"success": True, "xpath": xpath, "count": count, "elements": elements} + + except Exception as e: + return {"success": False, "error": str(e), "xpath": xpath} + + +async def find_buttons( + text_filter: Optional[str] = None, timeout: int = 10000 +) -> Dict[str, Any]: + """Find all button elements on the page.""" + group_id = 
generate_group_id("browser_find_buttons", text_filter or "all") + emit_info( + f"[bold white on blue] BROWSER FIND BUTTONS [/bold white on blue] 🔘 filter='{text_filter or 'none'}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Find buttons by role + locator = page.get_by_role("button") + + count = await locator.count() + + buttons = [] + for i in range(min(count, 20)): # Limit to 20 buttons + button = locator.nth(i) + if await button.is_visible(): + text = await button.text_content() + if text_filter and text_filter.lower() not in text.lower(): + continue + + buttons.append({"index": i, "text": text, "visible": True}) + + filtered_count = len(buttons) + + emit_info( + f"[green]Found {filtered_count} buttons" + + (f" containing '{text_filter}'" if text_filter else "") + + "[/green]", + message_group=group_id, + ) + + return { + "success": True, + "text_filter": text_filter, + "total_count": count, + "filtered_count": filtered_count, + "buttons": buttons, + } + + except Exception as e: + return {"success": False, "error": str(e), "text_filter": text_filter} + + +async def find_links( + text_filter: Optional[str] = None, timeout: int = 10000 +) -> Dict[str, Any]: + """Find all link elements on the page.""" + group_id = generate_group_id("browser_find_links", text_filter or "all") + emit_info( + f"[bold white on blue] BROWSER FIND LINKS [/bold white on blue] 🔗 filter='{text_filter or 'none'}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Find links by role + locator = page.get_by_role("link") + + count = await locator.count() + + links = [] + for i in range(min(count, 20)): # Limit to 20 links + link = locator.nth(i) + if await link.is_visible(): + text = await link.text_content() + href = await link.get_attribute("href") + + if text_filter and text_filter.lower() not in text.lower(): + continue + + links.append({"index": i, "text": text, "href": href, "visible": True}) + + filtered_count = len(links) + + emit_info( + f"[green]Found {filtered_count} links" + + (f" containing '{text_filter}'" if text_filter else "") + + "[/green]", + message_group=group_id, + ) + + return { + "success": True, + "text_filter": text_filter, + "total_count": count, + "filtered_count": filtered_count, + "links": links, + } + + except Exception as e: + return {"success": False, "error": str(e), "text_filter": text_filter} + + +# Tool registration functions +def register_find_by_role(agent): + """Register the find by role tool.""" + + @agent.tool + async def browser_find_by_role( + context: RunContext, + role: str, + name: Optional[str] = None, + exact: bool = False, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find elements by ARIA role (recommended for accessibility). + + Args: + role: ARIA role (button, link, textbox, heading, etc.) 
+ name: Optional accessible name to filter by + exact: Whether to match name exactly + timeout: Timeout in milliseconds + + Returns: + Dict with found elements and their properties + """ + return await find_by_role(role, name, exact, timeout) + + +def register_find_by_text(agent): + """Register the find by text tool.""" + + @agent.tool + async def browser_find_by_text( + context: RunContext, + text: str, + exact: bool = False, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find elements containing specific text content. + + Args: + text: Text to search for + exact: Whether to match text exactly + timeout: Timeout in milliseconds + + Returns: + Dict with found elements and their properties + """ + return await find_by_text(text, exact, timeout) + + +def register_find_by_label(agent): + """Register the find by label tool.""" + + @agent.tool + async def browser_find_by_label( + context: RunContext, + text: str, + exact: bool = False, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find form elements by their associated label text. + + Args: + text: Label text to search for + exact: Whether to match label exactly + timeout: Timeout in milliseconds + + Returns: + Dict with found form elements and their properties + """ + return await find_by_label(text, exact, timeout) + + +def register_find_by_placeholder(agent): + """Register the find by placeholder tool.""" + + @agent.tool + async def browser_find_by_placeholder( + context: RunContext, + text: str, + exact: bool = False, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find elements by placeholder text. + + Args: + text: Placeholder text to search for + exact: Whether to match placeholder exactly + timeout: Timeout in milliseconds + + Returns: + Dict with found elements and their properties + """ + return await find_by_placeholder(text, exact, timeout) + + +def register_find_by_test_id(agent): + """Register the find by test ID tool.""" + + @agent.tool + async def browser_find_by_test_id( + context: RunContext, + test_id: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find elements by test ID attribute (data-testid). + + Args: + test_id: Test ID to search for + timeout: Timeout in milliseconds + + Returns: + Dict with found elements and their properties + """ + return await find_by_test_id(test_id, timeout) + + +def register_run_xpath_query(agent): + """Register the XPath query tool.""" + + @agent.tool + async def browser_xpath_query( + context: RunContext, + xpath: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find elements using XPath selector (fallback when semantic locators fail). + + Args: + xpath: XPath expression + timeout: Timeout in milliseconds + + Returns: + Dict with found elements and their properties + """ + return await run_xpath_query(xpath, timeout) + + +def register_find_buttons(agent): + """Register the find buttons tool.""" + + @agent.tool + async def browser_find_buttons( + context: RunContext, + text_filter: Optional[str] = None, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find all button elements on the page. 
+ + Args: + text_filter: Optional text to filter buttons by + timeout: Timeout in milliseconds + + Returns: + Dict with found buttons and their properties + """ + return await find_buttons(text_filter, timeout) + + +def register_find_links(agent): + """Register the find links tool.""" + + @agent.tool + async def browser_find_links( + context: RunContext, + text_filter: Optional[str] = None, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Find all link elements on the page. + + Args: + text_filter: Optional text to filter links by + timeout: Timeout in milliseconds + + Returns: + Dict with found links and their properties + """ + return await find_links(text_filter, timeout) diff --git a/code_puppy/tools/browser/browser_navigation.py b/code_puppy/tools/browser/browser_navigation.py new file mode 100644 index 00000000..f02ca17f --- /dev/null +++ b/code_puppy/tools/browser/browser_navigation.py @@ -0,0 +1,251 @@ +"""Browser navigation and control tools.""" + +from typing import Any, Dict + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager + + +async def navigate_to_url(url: str) -> Dict[str, Any]: + """Navigate to a specific URL.""" + group_id = generate_group_id("browser_navigate", url) + emit_info( + f"[bold white on blue] BROWSER NAVIGATE [/bold white on blue] 🌐 {url}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Navigate to URL + await page.goto(url, wait_until="domcontentloaded", timeout=30000) + + # Get final URL (in case of redirects) + final_url = page.url + title = await page.title() + + emit_info(f"[green]Navigated to: {final_url}[/green]", message_group=group_id) + + return {"success": True, "url": final_url, "title": title, "requested_url": url} + + except Exception as e: + emit_info(f"[red]Navigation failed: {str(e)}[/red]", message_group=group_id) + return {"success": False, "error": str(e), "url": url} + + +async def get_page_info() -> Dict[str, Any]: + """Get current page information.""" + group_id = generate_group_id("browser_get_page_info") + emit_info( + "[bold white on blue] BROWSER GET PAGE INFO [/bold white on blue] 📌", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + url = page.url + title = await page.title() + + return {"success": True, "url": url, "title": title} + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def go_back() -> Dict[str, Any]: + """Navigate back in browser history.""" + group_id = generate_group_id("browser_go_back") + emit_info( + "[bold white on blue] BROWSER GO BACK [/bold white on blue] ⬅️", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + await page.go_back(wait_until="domcontentloaded") + + return {"success": True, "url": page.url, "title": await page.title()} + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def go_forward() -> Dict[str, Any]: + """Navigate forward in browser history.""" + group_id = 
generate_group_id("browser_go_forward") + emit_info( + "[bold white on blue] BROWSER GO FORWARD [/bold white on blue] ➡️", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + await page.go_forward(wait_until="domcontentloaded") + + return {"success": True, "url": page.url, "title": await page.title()} + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def reload_page(wait_until: str = "domcontentloaded") -> Dict[str, Any]: + """Reload the current page.""" + group_id = generate_group_id("browser_reload", wait_until) + emit_info( + f"[bold white on blue] BROWSER RELOAD [/bold white on blue] 🔄 wait_until={wait_until}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + await page.reload(wait_until=wait_until) + + return {"success": True, "url": page.url, "title": await page.title()} + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def wait_for_load_state( + state: str = "domcontentloaded", timeout: int = 30000 +) -> Dict[str, Any]: + """Wait for page to reach a specific load state.""" + group_id = generate_group_id("browser_wait_for_load", f"{state}_{timeout}") + emit_info( + f"[bold white on blue] BROWSER WAIT FOR LOAD [/bold white on blue] ⏱️ state={state} timeout={timeout}ms", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + await page.wait_for_load_state(state, timeout=timeout) + + return {"success": True, "state": state, "url": page.url} + + except Exception as e: + return {"success": False, "error": str(e), "state": state} + + +def register_navigate_to_url(agent): + """Register the navigation tool.""" + + @agent.tool + async def browser_navigate(context: RunContext, url: str) -> Dict[str, Any]: + """ + Navigate the browser to a specific URL. + + Args: + url: The URL to navigate to (must include protocol like https://) + + Returns: + Dict with navigation results including final URL and page title + """ + return await navigate_to_url(url) + + +def register_get_page_info(agent): + """Register the page info tool.""" + + @agent.tool + async def browser_get_page_info(context: RunContext) -> Dict[str, Any]: + """ + Get information about the current page. + + Returns: + Dict with current URL and page title + """ + return await get_page_info() + + +def register_browser_go_back(agent): + """Register browser go back tool.""" + + @agent.tool + async def browser_go_back(context: RunContext) -> Dict[str, Any]: + """ + Navigate back in browser history. + + Returns: + Dict with navigation results + """ + return await go_back() + + +def register_browser_go_forward(agent): + """Register browser go forward tool.""" + + @agent.tool + async def browser_go_forward(context: RunContext) -> Dict[str, Any]: + """ + Navigate forward in browser history. 
+ + Returns: + Dict with navigation results + """ + return await go_forward() + + +def register_reload_page(agent): + """Register the page reload tool.""" + + @agent.tool + async def browser_reload( + context: RunContext, wait_until: str = "domcontentloaded" + ) -> Dict[str, Any]: + """ + Reload the current page. + + Args: + wait_until: Load state to wait for (networkidle, domcontentloaded, load) + + Returns: + Dict with reload results + """ + return await reload_page(wait_until) + + +def register_wait_for_load_state(agent): + """Register the wait for load state tool.""" + + @agent.tool + async def browser_wait_for_load( + context: RunContext, state: str = "domcontentloaded", timeout: int = 30000 + ) -> Dict[str, Any]: + """ + Wait for the page to reach a specific load state. + + Args: + state: Load state to wait for (networkidle, domcontentloaded, load) + timeout: Timeout in milliseconds + + Returns: + Dict with wait results + """ + return await wait_for_load_state(state, timeout) diff --git a/code_puppy/tools/browser/browser_screenshot.py b/code_puppy/tools/browser/browser_screenshot.py new file mode 100644 index 00000000..7c87d248 --- /dev/null +++ b/code_puppy/tools/browser/browser_screenshot.py @@ -0,0 +1,243 @@ +"""Screenshot and visual analysis tool with VQA capabilities.""" + +import asyncio +from datetime import datetime +from pathlib import Path +from tempfile import gettempdir, mkdtemp +from typing import Any, Dict, Optional + +from pydantic import BaseModel +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_error, emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager +from .vqa_agent import run_vqa_analysis + +_TEMP_SCREENSHOT_ROOT = Path( + mkdtemp(prefix="code_puppy_screenshots_", dir=gettempdir()) +) + + +def _build_screenshot_path(timestamp: str) -> Path: + """Return the target path for a screenshot using a shared temp directory.""" + filename = f"screenshot_{timestamp}.png" + return _TEMP_SCREENSHOT_ROOT / filename + + +class ScreenshotResult(BaseModel): + """Result from screenshot operation.""" + + success: bool + screenshot_path: Optional[str] = None + screenshot_data: Optional[bytes] = None + timestamp: Optional[str] = None + error: Optional[str] = None + + +async def _capture_screenshot( + page, + full_page: bool = False, + element_selector: Optional[str] = None, + save_screenshot: bool = True, + group_id: Optional[str] = None, +) -> Dict[str, Any]: + """Internal screenshot capture function.""" + try: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + + # Take screenshot + if element_selector: + # Screenshot specific element + element = await page.locator(element_selector).first + if not await element.is_visible(): + return { + "success": False, + "error": f"Element '{element_selector}' is not visible", + } + screenshot_data = await element.screenshot() + else: + # Screenshot page or full page + screenshot_data = await page.screenshot(full_page=full_page) + + result = { + "success": True, + "screenshot_data": screenshot_data, + "timestamp": timestamp, + } + + if save_screenshot: + screenshot_path = _build_screenshot_path(timestamp) + screenshot_path.parent.mkdir(parents=True, exist_ok=True) + + with open(screenshot_path, "wb") as f: + f.write(screenshot_data) + + result["screenshot_path"] = str(screenshot_path) + message = f"[green]Screenshot saved: {screenshot_path}[/green]" + if group_id: + emit_info(message, message_group=group_id) + else: + emit_info(message) + + return 
result + + except Exception as e: + return {"success": False, "error": str(e)} + + +async def take_screenshot_and_analyze( + question: str, + full_page: bool = False, + element_selector: Optional[str] = None, + save_screenshot: bool = True, +) -> Dict[str, Any]: + """ + Take a screenshot and analyze it using visual understanding. + + Args: + question: The specific question to ask about the screenshot + full_page: Whether to capture the full page or just viewport + element_selector: Optional selector to screenshot just a specific element + save_screenshot: Whether to save the screenshot to disk + + Returns: + Dict containing analysis results and screenshot info + """ + target = element_selector or ("full_page" if full_page else "viewport") + group_id = generate_group_id( + "browser_screenshot_analyze", f"{question[:50]}_{target}" + ) + emit_info( + f"[bold white on blue] BROWSER SCREENSHOT ANALYZE [/bold white on blue] 📷 question='{question[:100]}{'...' if len(question) > 100 else ''}' target={target}", + message_group=group_id, + ) + try: + # Get the current browser page + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return { + "success": False, + "error": "No active browser page available. Please navigate to a webpage first.", + "question": question, + } + + # Take screenshot + screenshot_result = await _capture_screenshot( + page, + full_page=full_page, + element_selector=element_selector, + save_screenshot=save_screenshot, + group_id=group_id, + ) + + if not screenshot_result["success"]: + error_message = screenshot_result.get("error", "Screenshot failed") + emit_error( + f"[red]Screenshot capture failed: {error_message}[/red]", + message_group=group_id, + ) + return { + "success": False, + "error": error_message, + "question": question, + } + + screenshot_bytes = screenshot_result.get("screenshot_data") + if not screenshot_bytes: + emit_error( + "[red]Screenshot captured but pixel data missing; cannot run visual analysis.[/red]", + message_group=group_id, + ) + return { + "success": False, + "error": "Screenshot captured but no image bytes available for analysis.", + "question": question, + } + + try: + vqa_result = await asyncio.to_thread( + run_vqa_analysis, + question, + screenshot_bytes, + ) + except Exception as exc: + emit_error( + f"[red]Visual question answering failed: {exc}[/red]", + message_group=group_id, + ) + return { + "success": False, + "error": f"Visual analysis failed: {exc}", + "question": question, + "screenshot_info": { + "path": screenshot_result.get("screenshot_path"), + "timestamp": screenshot_result.get("timestamp"), + "full_page": full_page, + "element_selector": element_selector, + }, + } + + emit_info( + f"[green]Visual analysis answer: {vqa_result.answer}[/green]", + message_group=group_id, + ) + emit_info( + f"[dim]Observations: {vqa_result.observations}[/dim]", + message_group=group_id, + ) + + return { + "success": True, + "question": question, + "answer": vqa_result.answer, + "confidence": vqa_result.confidence, + "observations": vqa_result.observations, + "screenshot_info": { + "path": screenshot_result.get("screenshot_path"), + "size": len(screenshot_bytes), + "timestamp": screenshot_result.get("timestamp"), + "full_page": full_page, + "element_selector": element_selector, + }, + } + + except Exception as e: + emit_info( + f"[red]Screenshot analysis failed: {str(e)}[/red]", message_group=group_id + ) + return {"success": False, "error": str(e), "question": question} + + +def 
register_take_screenshot_and_analyze(agent): + """Register the screenshot analysis tool.""" + + @agent.tool + async def browser_screenshot_analyze( + context: RunContext, + question: str, + full_page: bool = False, + element_selector: Optional[str] = None, + save_screenshot: bool = True, + ) -> Dict[str, Any]: + """ + Take a screenshot and analyze it to answer a specific question. + + Args: + question: The specific question to ask about the screenshot + full_page: Whether to capture the full page or just viewport + element_selector: Optional CSS/XPath selector to screenshot specific element + save_screenshot: Whether to save the screenshot to disk + + Returns: + Dict with analysis results including answer, confidence, and observations + """ + return await take_screenshot_and_analyze( + question=question, + full_page=full_page, + element_selector=element_selector, + save_screenshot=save_screenshot, + ) diff --git a/code_puppy/tools/browser/browser_scripts.py b/code_puppy/tools/browser/browser_scripts.py new file mode 100644 index 00000000..25c8b889 --- /dev/null +++ b/code_puppy/tools/browser/browser_scripts.py @@ -0,0 +1,472 @@ +"""JavaScript execution and advanced page manipulation tools.""" + +from typing import Any, Dict, Optional + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + +from .camoufox_manager import get_camoufox_manager + + +async def execute_javascript( + script: str, + timeout: int = 30000, +) -> Dict[str, Any]: + """Execute JavaScript code in the browser context.""" + group_id = generate_group_id("browser_execute_js", script[:100]) + emit_info( + f"[bold white on blue] BROWSER EXECUTE JS [/bold white on blue] 📜 script='{script[:100]}{'...' if len(script) > 100 else ''}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Execute JavaScript + result = await page.evaluate(script, timeout=timeout) + + emit_info( + "[green]JavaScript executed successfully[/green]", message_group=group_id + ) + + return {"success": True, "script": script, "result": result} + + except Exception as e: + emit_info( + f"[red]JavaScript execution failed: {str(e)}[/red]", message_group=group_id + ) + return {"success": False, "error": str(e), "script": script} + + +async def scroll_page( + direction: str = "down", + amount: int = 3, + element_selector: Optional[str] = None, +) -> Dict[str, Any]: + """Scroll the page or a specific element.""" + target = element_selector or "page" + group_id = generate_group_id("browser_scroll", f"{direction}_{amount}_{target}") + emit_info( + f"[bold white on blue] BROWSER SCROLL [/bold white on blue] 📋 direction={direction} amount={amount} target='{target}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + if element_selector: + # Scroll specific element + element = page.locator(element_selector) + await element.scroll_into_view_if_needed() + + # Get element's current scroll position and dimensions + scroll_info = await element.evaluate(""" + el => { + const rect = el.getBoundingClientRect(); + return { + scrollTop: el.scrollTop, + scrollLeft: el.scrollLeft, + scrollHeight: el.scrollHeight, + scrollWidth: el.scrollWidth, + 
clientHeight: el.clientHeight, + clientWidth: el.clientWidth + }; + } + """) + + # Calculate scroll amount based on element size + scroll_amount = scroll_info["clientHeight"] * amount / 3 + + if direction.lower() == "down": + await element.evaluate(f"el => el.scrollTop += {scroll_amount}") + elif direction.lower() == "up": + await element.evaluate(f"el => el.scrollTop -= {scroll_amount}") + elif direction.lower() == "left": + await element.evaluate(f"el => el.scrollLeft -= {scroll_amount}") + elif direction.lower() == "right": + await element.evaluate(f"el => el.scrollLeft += {scroll_amount}") + + target = f"element '{element_selector}'" + + else: + # Scroll page + viewport_height = await page.evaluate("() => window.innerHeight") + scroll_amount = viewport_height * amount / 3 + + if direction.lower() == "down": + await page.evaluate(f"window.scrollBy(0, {scroll_amount})") + elif direction.lower() == "up": + await page.evaluate(f"window.scrollBy(0, -{scroll_amount})") + elif direction.lower() == "left": + await page.evaluate(f"window.scrollBy(-{scroll_amount}, 0)") + elif direction.lower() == "right": + await page.evaluate(f"window.scrollBy({scroll_amount}, 0)") + + target = "page" + + # Get current scroll position + scroll_pos = await page.evaluate(""" + () => ({ + x: window.pageXOffset, + y: window.pageYOffset + }) + """) + + emit_info( + f"[green]Scrolled {target} {direction}[/green]", message_group=group_id + ) + + return { + "success": True, + "direction": direction, + "amount": amount, + "target": target, + "scroll_position": scroll_pos, + } + + except Exception as e: + return { + "success": False, + "error": str(e), + "direction": direction, + "element_selector": element_selector, + } + + +async def scroll_to_element( + selector: str, + timeout: int = 10000, +) -> Dict[str, Any]: + """Scroll to bring an element into view.""" + group_id = generate_group_id("browser_scroll_to_element", selector[:100]) + emit_info( + f"[bold white on blue] BROWSER SCROLL TO ELEMENT [/bold white on blue] 🎯 selector='{selector}'", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="attached", timeout=timeout) + await element.scroll_into_view_if_needed() + + # Check if element is now visible + is_visible = await element.is_visible() + + emit_info( + f"[green]Scrolled to element: {selector}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "visible": is_visible} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def set_viewport_size( + width: int, + height: int, +) -> Dict[str, Any]: + """Set the viewport size.""" + group_id = generate_group_id("browser_set_viewport", f"{width}x{height}") + emit_info( + f"[bold white on blue] BROWSER SET VIEWPORT [/bold white on blue] 🖥️ size={width}x{height}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + await page.set_viewport_size({"width": width, "height": height}) + + emit_info( + f"[green]Set viewport size to {width}x{height}[/green]", + message_group=group_id, + ) + + return {"success": True, "width": width, "height": height} + + except Exception as e: + return 
{"success": False, "error": str(e), "width": width, "height": height} + + +async def wait_for_element( + selector: str, + state: str = "visible", + timeout: int = 30000, +) -> Dict[str, Any]: + """Wait for an element to reach a specific state.""" + group_id = generate_group_id("browser_wait_for_element", f"{selector[:50]}_{state}") + emit_info( + f"[bold white on blue] BROWSER WAIT FOR ELEMENT [/bold white on blue] ⏱️ selector='{selector}' state={state} timeout={timeout}ms", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state=state, timeout=timeout) + + emit_info( + f"[green]Element {selector} is now {state}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "state": state} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector, "state": state} + + +async def highlight_element( + selector: str, + color: str = "red", + timeout: int = 10000, +) -> Dict[str, Any]: + """Highlight an element with a colored border.""" + group_id = generate_group_id( + "browser_highlight_element", f"{selector[:50]}_{color}" + ) + emit_info( + f"[bold white on blue] BROWSER HIGHLIGHT ELEMENT [/bold white on blue] 🔦 selector='{selector}' color={color}", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + element = page.locator(selector) + await element.wait_for(state="visible", timeout=timeout) + + # Add highlight style + highlight_script = f""" + el => {{ + el.style.outline = '3px solid {color}'; + el.style.outlineOffset = '2px'; + el.style.backgroundColor = '{color}20'; // 20% opacity + el.setAttribute('data-highlighted', 'true'); + }} + """ + + await element.evaluate(highlight_script) + + emit_info( + f"[green]Highlighted element: {selector}[/green]", message_group=group_id + ) + + return {"success": True, "selector": selector, "color": color} + + except Exception as e: + return {"success": False, "error": str(e), "selector": selector} + + +async def clear_highlights() -> Dict[str, Any]: + """Clear all element highlights.""" + group_id = generate_group_id("browser_clear_highlights") + emit_info( + "[bold white on blue] BROWSER CLEAR HIGHLIGHTS [/bold white on blue] 🧹", + message_group=group_id, + ) + try: + browser_manager = get_camoufox_manager() + page = await browser_manager.get_current_page() + + if not page: + return {"success": False, "error": "No active browser page available"} + + # Remove all highlights + clear_script = """ + () => { + const highlighted = document.querySelectorAll('[data-highlighted="true"]'); + highlighted.forEach(el => { + el.style.outline = ''; + el.style.outlineOffset = ''; + el.style.backgroundColor = ''; + el.removeAttribute('data-highlighted'); + }); + return highlighted.length; + } + """ + + count = await page.evaluate(clear_script) + + emit_info(f"[green]Cleared {count} highlights[/green]", message_group=group_id) + + return {"success": True, "cleared_count": count} + + except Exception as e: + return {"success": False, "error": str(e)} + + +# Tool registration functions +def register_execute_javascript(agent): + """Register the JavaScript execution tool.""" + + @agent.tool + async def 
browser_execute_js( + context: RunContext, + script: str, + timeout: int = 30000, + ) -> Dict[str, Any]: + """ + Execute JavaScript code in the browser context. + + Args: + script: JavaScript code to execute + timeout: Timeout in milliseconds + + Returns: + Dict with execution results + """ + return await execute_javascript(script, timeout) + + +def register_scroll_page(agent): + """Register the scroll page tool.""" + + @agent.tool + async def browser_scroll( + context: RunContext, + direction: str = "down", + amount: int = 3, + element_selector: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Scroll the page or a specific element. + + Args: + direction: Scroll direction (up, down, left, right) + amount: Scroll amount multiplier (1-10) + element_selector: Optional selector to scroll specific element + + Returns: + Dict with scroll results + """ + return await scroll_page(direction, amount, element_selector) + + +def register_scroll_to_element(agent): + """Register the scroll to element tool.""" + + @agent.tool + async def browser_scroll_to_element( + context: RunContext, + selector: str, + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Scroll to bring an element into view. + + Args: + selector: CSS or XPath selector for the element + timeout: Timeout in milliseconds + + Returns: + Dict with scroll results + """ + return await scroll_to_element(selector, timeout) + + +def register_set_viewport_size(agent): + """Register the viewport size tool.""" + + @agent.tool + async def browser_set_viewport( + context: RunContext, + width: int, + height: int, + ) -> Dict[str, Any]: + """ + Set the browser viewport size. + + Args: + width: Viewport width in pixels + height: Viewport height in pixels + + Returns: + Dict with viewport size results + """ + return await set_viewport_size(width, height) + + +def register_wait_for_element(agent): + """Register the wait for element tool.""" + + @agent.tool + async def browser_wait_for_element( + context: RunContext, + selector: str, + state: str = "visible", + timeout: int = 30000, + ) -> Dict[str, Any]: + """ + Wait for an element to reach a specific state. + + Args: + selector: CSS or XPath selector for the element + state: State to wait for (visible, hidden, attached, detached) + timeout: Timeout in milliseconds + + Returns: + Dict with wait results + """ + return await wait_for_element(selector, state, timeout) + + +def register_browser_highlight_element(agent): + """Register the element highlighting tool.""" + + @agent.tool + async def browser_highlight_element( + context: RunContext, + selector: str, + color: str = "red", + timeout: int = 10000, + ) -> Dict[str, Any]: + """ + Highlight an element with a colored border for visual identification. + + Args: + selector: CSS or XPath selector for the element + color: Highlight color (red, blue, green, yellow, etc.) + timeout: Timeout in milliseconds + + Returns: + Dict with highlight results + """ + return await highlight_element(selector, color, timeout) + + +def register_browser_clear_highlights(agent): + """Register the clear highlights tool.""" + + @agent.tool + async def browser_clear_highlights(context: RunContext) -> Dict[str, Any]: + """ + Clear all element highlights from the page. 
+ + Returns: + Dict with clear results + """ + return await clear_highlights() diff --git a/code_puppy/tools/browser/browser_workflows.py b/code_puppy/tools/browser/browser_workflows.py new file mode 100644 index 00000000..2155e818 --- /dev/null +++ b/code_puppy/tools/browser/browser_workflows.py @@ -0,0 +1,204 @@ +"""Browser workflow management tools for saving and reusing automation patterns.""" + +from pathlib import Path +from typing import Any, Dict + +from pydantic_ai import RunContext + +from code_puppy.messaging import emit_info +from code_puppy.tools.common import generate_group_id + + +def get_workflows_directory() -> Path: + """Get the browser workflows directory, creating it if it doesn't exist.""" + home_dir = Path.home() + workflows_dir = home_dir / ".code_puppy" / "browser_workflows" + workflows_dir.mkdir(parents=True, exist_ok=True) + return workflows_dir + + +async def save_workflow(name: str, content: str) -> Dict[str, Any]: + """Save a browser workflow as a markdown file.""" + group_id = generate_group_id("save_workflow", name) + emit_info( + f"[bold white on blue] SAVE WORKFLOW [/bold white on blue] 💾 name='{name}'", + message_group=group_id, + ) + + try: + workflows_dir = get_workflows_directory() + + # Clean up the filename - remove spaces, special chars, etc. + safe_name = "".join(c for c in name if c.isalnum() or c in ("-", "_")).lower() + if not safe_name: + safe_name = "workflow" + + # Ensure .md extension + if not safe_name.endswith(".md"): + safe_name += ".md" + + workflow_path = workflows_dir / safe_name + + # Write the workflow content + with open(workflow_path, "w", encoding="utf-8") as f: + f.write(content) + + emit_info( + f"[green]✅ Workflow saved successfully: {workflow_path}[/green]", + message_group=group_id, + ) + + return { + "success": True, + "path": str(workflow_path), + "name": safe_name, + "size": len(content), + } + + except Exception as e: + emit_info( + f"[red]❌ Failed to save workflow: {e}[/red]", + message_group=group_id, + ) + return {"success": False, "error": str(e), "name": name} + + +async def list_workflows() -> Dict[str, Any]: + """List all available browser workflows.""" + group_id = generate_group_id("list_workflows") + emit_info( + "[bold white on blue] LIST WORKFLOWS [/bold white on blue] 📋", + message_group=group_id, + ) + + try: + workflows_dir = get_workflows_directory() + + # Find all .md files in the workflows directory + workflow_files = list(workflows_dir.glob("*.md")) + + workflows = [] + for workflow_file in workflow_files: + try: + stat = workflow_file.stat() + workflows.append( + { + "name": workflow_file.name, + "path": str(workflow_file), + "size": stat.st_size, + "modified": stat.st_mtime, + } + ) + except Exception as e: + emit_info( + f"[yellow]Warning: Could not read {workflow_file}: {e}[/yellow]" + ) + + # Sort by modification time (newest first) + workflows.sort(key=lambda x: x["modified"], reverse=True) + + emit_info( + f"[green]✅ Found {len(workflows)} workflow(s)[/green]", + message_group=group_id, + ) + + return { + "success": True, + "workflows": workflows, + "count": len(workflows), + "directory": str(workflows_dir), + } + + except Exception as e: + emit_info( + f"[red]❌ Failed to list workflows: {e}[/red]", + message_group=group_id, + ) + return {"success": False, "error": str(e)} + + +async def read_workflow(name: str) -> Dict[str, Any]: + """Read a saved browser workflow.""" + group_id = generate_group_id("read_workflow", name) + emit_info( + f"[bold white on blue] READ WORKFLOW [/bold white on blue] 📖 
name='{name}'", + message_group=group_id, + ) + + try: + workflows_dir = get_workflows_directory() + + # Handle both with and without .md extension + if not name.endswith(".md"): + name += ".md" + + workflow_path = workflows_dir / name + + if not workflow_path.exists(): + emit_info( + f"[red]❌ Workflow not found: {name}[/red]", + message_group=group_id, + ) + return { + "success": False, + "error": f"Workflow '{name}' not found", + "name": name, + } + + # Read the workflow content + with open(workflow_path, "r", encoding="utf-8") as f: + content = f.read() + + emit_info( + f"[green]✅ Workflow read successfully: {len(content)} characters[/green]", + message_group=group_id, + ) + + return { + "success": True, + "name": name, + "content": content, + "path": str(workflow_path), + "size": len(content), + } + + except Exception as e: + emit_info( + f"[red]❌ Failed to read workflow: {e}[/red]", + message_group=group_id, + ) + return {"success": False, "error": str(e), "name": name} + + +def register_save_workflow(agent): + """Register the save workflow tool.""" + + @agent.tool + async def browser_save_workflow( + context: RunContext, + name: str, + content: str, + ) -> Dict[str, Any]: + """Save a browser automation workflow to disk for future reuse.""" + return await save_workflow(name, content) + + +def register_list_workflows(agent): + """Register the list workflows tool.""" + + @agent.tool + async def browser_list_workflows(context: RunContext) -> Dict[str, Any]: + """List all saved browser automation workflows.""" + return await list_workflows() + + +def register_read_workflow(agent): + """Register the read workflow tool.""" + + @agent.tool + async def browser_read_workflow( + context: RunContext, + name: str, + ) -> Dict[str, Any]: + """Read the contents of a saved browser automation workflow.""" + return await read_workflow(name) diff --git a/code_puppy/tools/browser/camoufox_manager.py b/code_puppy/tools/browser/camoufox_manager.py new file mode 100644 index 00000000..0f976526 --- /dev/null +++ b/code_puppy/tools/browser/camoufox_manager.py @@ -0,0 +1,217 @@ +"""Camoufox browser manager - privacy-focused Firefox automation.""" + +from pathlib import Path +from typing import Optional, TypeAlias + +import camoufox +from camoufox.addons import DefaultAddons +from camoufox.exceptions import CamoufoxNotInstalled, UnsupportedVersion +from camoufox.locale import ALLOW_GEOIP, download_mmdb +from camoufox.pkgman import CamoufoxFetcher, camoufox_path +from playwright.async_api import Browser, BrowserContext, Page + +_MIN_VIEWPORT_DIMENSION = 640 + +from code_puppy.messaging import emit_info + + +class CamoufoxManager: + """Singleton browser manager for Camoufox (privacy-focused Firefox) automation.""" + + _instance: Optional["CamoufoxManager"] = None + _browser: Optional[Browser] = None + _context: Optional[BrowserContext] = None + _initialized: bool = False + + def __new__(cls): + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def __init__(self): + # Only initialize once + if hasattr(self, "_init_done"): + return + self._init_done = True + + self.headless = False + self.homepage = "https://www.google.com" + # Camoufox-specific settings + self.geoip = True # Enable GeoIP spoofing + self.block_webrtc = True # Block WebRTC for privacy + self.humanize = True # Add human-like behavior + + # Persistent profile directory for consistent browser state across runs + self.profile_dir = self._get_profile_directory() + + @classmethod + def get_instance(cls) -> 
"CamoufoxManager": + """Get the singleton instance.""" + if cls._instance is None: + cls._instance = cls() + return cls._instance + + def _get_profile_directory(self) -> Path: + """Get or create the persistent profile directory. + + Returns a Path object pointing to ~/.code_puppy/camoufox_profile + where browser data (cookies, history, bookmarks, etc.) will be stored. + """ + profile_path = Path.home() / ".code_puppy" / "camoufox_profile" + profile_path.mkdir(parents=True, exist_ok=True) + return profile_path + + async def async_initialize(self) -> None: + """Initialize Camoufox browser.""" + if self._initialized: + return + + try: + emit_info("[yellow]Initializing Camoufox (privacy Firefox)...[/yellow]") + + # Ensure Camoufox binary and dependencies are fetched before launching + await self._prefetch_camoufox() + + await self._initialize_camoufox() + emit_info( + "[green]✅ Camoufox initialized successfully (privacy-focused Firefox)[/green]" + ) + self._initialized = True + + except Exception: + await self._cleanup() + raise + + async def _initialize_camoufox(self) -> None: + """Try to start Camoufox with the configured privacy settings.""" + emit_info(f"[cyan]📁 Using persistent profile: {self.profile_dir}[/cyan]") + + camoufox_instance = camoufox.AsyncCamoufox( + headless=self.headless, + block_webrtc=self.block_webrtc, + humanize=self.humanize, + exclude_addons=list(DefaultAddons), + persistent_context=True, + user_data_dir=str(self.profile_dir), + addons=[], + ) + + self._browser = camoufox_instance.browser + # Use persistent storage directory for browser context + # This ensures cookies, localStorage, history, etc. persist across runs + if not self._initialized: + self._context = await camoufox_instance.start() + self._initialized = True + # Do not auto-open a page here to avoid duplicate windows/tabs. + + async def get_current_page(self) -> Optional[Page]: + """Get the currently active page. 
Lazily creates one if none exist.""" + if not self._initialized or not self._context: + await self.async_initialize() + + if not self._context: + return None + + pages = self._context.pages + if pages: + return pages[0] + + # Lazily create a new blank page without navigation + return await self._context.new_page() + + async def new_page(self, url: Optional[str] = None) -> Page: + """Create a new page and optionally navigate to URL.""" + if not self._initialized: + await self.async_initialize() + + page = await self._context.new_page() + if url: + await page.goto(url) + return page + + async def _prefetch_camoufox(self) -> None: + """Prefetch Camoufox binary and dependencies.""" + emit_info( + "[cyan]🔍 Ensuring Camoufox binary and dependencies are up-to-date...[/cyan]" + ) + + needs_install = False + try: + camoufox_path(download_if_missing=False) + emit_info("[cyan]🗃️ Using cached Camoufox installation[/cyan]") + except (CamoufoxNotInstalled, FileNotFoundError): + emit_info("[cyan]📥 Camoufox not found, installing fresh copy[/cyan]") + needs_install = True + except UnsupportedVersion: + emit_info("[cyan]♻️ Camoufox update required, reinstalling[/cyan]") + needs_install = True + + if needs_install: + CamoufoxFetcher().install() + + # Fetch GeoIP database if enabled + if ALLOW_GEOIP: + download_mmdb() + + emit_info("[cyan]📦 Camoufox dependencies ready[/cyan]") + + async def close_page(self, page: Page) -> None: + """Close a specific page.""" + await page.close() + + async def get_all_pages(self) -> list[Page]: + """Get all open pages.""" + if not self._context: + return [] + return self._context.pages + + async def _cleanup(self) -> None: + """Clean up browser resources and save persistent state.""" + try: + # Save browser state before closing (cookies, localStorage, etc.) 
+ if self._context: + try: + storage_state_path = self.profile_dir / "storage_state.json" + await self._context.storage_state(path=str(storage_state_path)) + emit_info( + f"[green]💾 Browser state saved to {storage_state_path}[/green]" + ) + except Exception as e: + emit_info( + f"[yellow]Warning: Could not save storage state: {e}[/yellow]" + ) + + await self._context.close() + self._context = None + if self._browser: + await self._browser.close() + self._browser = None + self._initialized = False + except Exception as e: + emit_info(f"[yellow]Warning during cleanup: {e}[/yellow]") + + async def close(self) -> None: + """Close the browser and clean up resources.""" + await self._cleanup() + emit_info("[yellow]Camoufox browser closed[/yellow]") + + def __del__(self): + """Ensure cleanup on object destruction.""" + # Note: Can't use async in __del__, so this is just a fallback + if self._initialized: + import asyncio + + try: + loop = asyncio.get_event_loop() + if loop.is_running(): + loop.create_task(self._cleanup()) + else: + loop.run_until_complete(self._cleanup()) + except Exception: + pass # Best effort cleanup + + +# Convenience function for getting the singleton instance +def get_camoufox_manager() -> CamoufoxManager: + """Get the singleton CamoufoxManager instance.""" + return CamoufoxManager.get_instance() diff --git a/code_puppy/tools/browser/vqa_agent.py b/code_puppy/tools/browser/vqa_agent.py new file mode 100644 index 00000000..f29b49a4 --- /dev/null +++ b/code_puppy/tools/browser/vqa_agent.py @@ -0,0 +1,62 @@ +"""Utilities for running visual question-answering via pydantic-ai.""" + +from __future__ import annotations + +from functools import lru_cache + +from pydantic import BaseModel, Field +from pydantic_ai import Agent, BinaryContent + +from code_puppy.config import get_vqa_model_name +from code_puppy.model_factory import ModelFactory + + +class VisualAnalysisResult(BaseModel): + """Structured response from the VQA agent.""" + + answer: str + confidence: float = Field(ge=0.0, le=1.0) + observations: str + + +@lru_cache(maxsize=1) +def _load_vqa_agent(model_name: str) -> Agent[None, VisualAnalysisResult]: + """Create a cached agent instance for visual analysis.""" + models_config = ModelFactory.load_config() + model = ModelFactory.get_model(model_name, models_config) + + instructions = ( + "You are a visual analysis specialist. Answer the user's question about the provided image. " + "Always respond using the structured schema: answer, confidence (0-1 float), observations. " + "Confidence reflects how certain you are about the answer. Observations should include useful, concise context." 
+ ) + + return Agent( + model=model, + instructions=instructions, + output_type=VisualAnalysisResult, + retries=2, + ) + + +def _get_vqa_agent() -> Agent[None, VisualAnalysisResult]: + """Return a cached VQA agent configured with the current model.""" + model_name = get_vqa_model_name() + # lru_cache keyed by model_name ensures refresh when configuration changes + return _load_vqa_agent(model_name) + + +def run_vqa_analysis( + question: str, + image_bytes: bytes, + media_type: str = "image/png", +) -> VisualAnalysisResult: + """Execute the VQA agent synchronously against screenshot bytes.""" + agent = _get_vqa_agent() + result = agent.run_sync( + [ + question, + BinaryContent(data=image_bytes, media_type=media_type), + ] + ) + return result.output diff --git a/code_puppy/tools/command_runner.py b/code_puppy/tools/command_runner.py index 0f462f67..bd4126d7 100644 --- a/code_puppy/tools/command_runner.py +++ b/code_puppy/tools/command_runner.py @@ -1,68 +1,456 @@ -# command_runner.py +import os +import signal import subprocess +import sys +import threading import time -import os -from typing import Dict, Any -from code_puppy.tools.common import console -from code_puppy.agent import code_generation_agent +import traceback +from typing import Set + +from pydantic import BaseModel from pydantic_ai import RunContext from rich.markdown import Markdown -from rich.syntax import Syntax +from rich.text import Text -# Environment variables used in this module: -# - YOLO_MODE: When set to "true" (case-insensitive), bypasses the safety confirmation -# prompt when running shell commands. This allows commands to execute -# without user intervention, which can be useful for automation but -# introduces security risks. Default is "false". +from code_puppy.messaging import ( + emit_divider, + emit_error, + emit_info, + emit_system_message, + emit_warning, +) +from code_puppy.tools.common import generate_group_id +from code_puppy.tui_state import is_tui_mode +# Maximum line length for shell command output to prevent massive token usage +# This helps avoid exceeding model context limits when commands produce very long lines +MAX_LINE_LENGTH = 256 -@code_generation_agent.tool -def run_shell_command( - context: RunContext, command: str, cwd: str = None, timeout: int = 60 -) -> Dict[str, Any]: - """Run a shell command and return its output. - Args: - command: The shell command to execute. - cwd: The current working directory to run the command in. Defaults to None (current directory). - timeout: Maximum time in seconds to wait for the command to complete. Defaults to 60. +def _truncate_line(line: str) -> str: + """Truncate a line to MAX_LINE_LENGTH if it exceeds the limit.""" + if len(line) > MAX_LINE_LENGTH: + return line[:MAX_LINE_LENGTH] + "... [truncated]" + return line + + +_AWAITING_USER_INPUT = False + +_CONFIRMATION_LOCK = threading.Lock() + +# Track running shell processes so we can kill them on Ctrl-C from the UI +_RUNNING_PROCESSES: Set[subprocess.Popen] = set() +_RUNNING_PROCESSES_LOCK = threading.Lock() +_USER_KILLED_PROCESSES = set() + - Returns: - A dictionary with the command result, including stdout, stderr, and exit code. +def _register_process(proc: subprocess.Popen) -> None: + with _RUNNING_PROCESSES_LOCK: + _RUNNING_PROCESSES.add(proc) + + +def _unregister_process(proc: subprocess.Popen) -> None: + with _RUNNING_PROCESSES_LOCK: + _RUNNING_PROCESSES.discard(proc) + + +def _kill_process_group(proc: subprocess.Popen) -> None: + """Attempt to aggressively terminate a process and its group. 
+ + Cross-platform best-effort. On POSIX, uses process groups. On Windows, tries CTRL_BREAK_EVENT, then terminate(). """ - if not command or not command.strip(): - console.print("[bold red]Error:[/bold red] Command cannot be empty") - return {"error": "Command cannot be empty"} - - # Display command execution in a visually distinct way - console.print("\n[bold white on blue] SHELL COMMAND [/bold white on blue]") - console.print(f"[bold green]$ {command}[/bold green]") - if cwd: - console.print(f"[dim]Working directory: {cwd}[/dim]") - console.print("[dim]" + "-" * 60 + "[/dim]") - - import os - - # Check for YOLO_MODE environment variable to bypass safety check - yolo_mode = os.getenv("YOLO_MODE", "false").lower() == "true" - - if not yolo_mode: - # Prompt user for confirmation before running the command - user_input = input("Are you sure you want to run this command? (yes/no): ") - if user_input.strip().lower() not in {"yes", "y"}: - console.print( - "[bold yellow]Command execution canceled by user.[/bold yellow]" - ) - return { + try: + if sys.platform.startswith("win"): + try: + # Try a soft break first if the group exists + proc.send_signal(signal.CTRL_BREAK_EVENT) # type: ignore[attr-defined] + time.sleep(0.8) + except Exception: + pass + if proc.poll() is None: + try: + proc.terminate() + time.sleep(0.8) + except Exception: + pass + if proc.poll() is None: + try: + proc.kill() + except Exception: + pass + return + + # POSIX + pid = proc.pid + try: + pgid = os.getpgid(pid) + os.killpg(pgid, signal.SIGTERM) + time.sleep(1.0) + if proc.poll() is None: + os.killpg(pgid, signal.SIGINT) + time.sleep(0.6) + if proc.poll() is None: + os.killpg(pgid, signal.SIGKILL) + time.sleep(0.5) + except (OSError, ProcessLookupError): + # Fall back to direct kill of the process + try: + if proc.poll() is None: + proc.kill() + except (OSError, ProcessLookupError): + pass + + if proc.poll() is None: + # Last ditch attempt; may be unkillable zombie + try: + for _ in range(3): + os.kill(proc.pid, signal.SIGKILL) + time.sleep(0.2) + if proc.poll() is not None: + break + except Exception: + pass + except Exception as e: + emit_error(f"Kill process error: {e}") + + +def kill_all_running_shell_processes() -> int: + """Kill all currently tracked running shell processes. + + Returns the number of processes signaled. 
+ """ + procs: list[subprocess.Popen] + with _RUNNING_PROCESSES_LOCK: + procs = list(_RUNNING_PROCESSES) + count = 0 + for p in procs: + try: + if p.poll() is None: + _kill_process_group(p) + count += 1 + _USER_KILLED_PROCESSES.add(p.pid) + finally: + _unregister_process(p) + return count + + +# Function to check if user input is awaited +def is_awaiting_user_input(): + """Check if command_runner is waiting for user input.""" + global _AWAITING_USER_INPUT + return _AWAITING_USER_INPUT + + +# Function to set user input flag +def set_awaiting_user_input(awaiting=True): + """Set the flag indicating if user input is awaited.""" + global _AWAITING_USER_INPUT + _AWAITING_USER_INPUT = awaiting + + # When we're setting this flag, also pause/resume all active spinners + if awaiting: + # Pause all active spinners (imported here to avoid circular imports) + try: + from code_puppy.messaging.spinner import pause_all_spinners + + pause_all_spinners() + except ImportError: + pass # Spinner functionality not available + else: + # Resume all active spinners + try: + from code_puppy.messaging.spinner import resume_all_spinners + + resume_all_spinners() + except ImportError: + pass # Spinner functionality not available + + +class ShellCommandOutput(BaseModel): + success: bool + command: str | None + error: str | None = "" + stdout: str | None + stderr: str | None + exit_code: int | None + execution_time: float | None + timeout: bool | None = False + user_interrupted: bool | None = False + + +def run_shell_command_streaming( + process: subprocess.Popen, + timeout: int = 60, + command: str = "", + group_id: str = None, +): + start_time = time.time() + last_output_time = [start_time] + + ABSOLUTE_TIMEOUT_SECONDS = 270 + + stdout_lines = [] + stderr_lines = [] + + stdout_thread = None + stderr_thread = None + + def read_stdout(): + try: + for line in iter(process.stdout.readline, ""): + if line: + line = line.rstrip("\n\r") + # Limit line length to prevent massive token usage + line = _truncate_line(line) + stdout_lines.append(line) + emit_system_message(line, message_group=group_id) + last_output_time[0] = time.time() + except Exception: + pass + + def read_stderr(): + try: + for line in iter(process.stderr.readline, ""): + if line: + line = line.rstrip("\n\r") + # Limit line length to prevent massive token usage + line = _truncate_line(line) + stderr_lines.append(line) + emit_system_message(line, message_group=group_id) + last_output_time[0] = time.time() + except Exception: + pass + + def cleanup_process_and_threads(timeout_type: str = "unknown"): + nonlocal stdout_thread, stderr_thread + + def nuclear_kill(proc): + _kill_process_group(proc) + + try: + if process.poll() is None: + nuclear_kill(process) + + try: + if process.stdout and not process.stdout.closed: + process.stdout.close() + if process.stderr and not process.stderr.closed: + process.stderr.close() + if process.stdin and not process.stdin.closed: + process.stdin.close() + except (OSError, ValueError): + pass + + # Unregister once we're done cleaning up + _unregister_process(process) + + if stdout_thread and stdout_thread.is_alive(): + stdout_thread.join(timeout=3) + if stdout_thread.is_alive(): + emit_warning( + f"stdout reader thread failed to terminate after {timeout_type} timeout", + message_group=group_id, + ) + + if stderr_thread and stderr_thread.is_alive(): + stderr_thread.join(timeout=3) + if stderr_thread.is_alive(): + emit_warning( + f"stderr reader thread failed to terminate after {timeout_type} timeout", + message_group=group_id, + ) + 
+ except Exception as e: + emit_warning(f"Error during process cleanup: {e}", message_group=group_id) + + execution_time = time.time() - start_time + return ShellCommandOutput( + **{ "success": False, "command": command, - "error": "User canceled command execution", + "stdout": "\n".join(stdout_lines[-256:]), + "stderr": "\n".join(stderr_lines[-256:]), + "exit_code": -9, + "execution_time": execution_time, + "timeout": True, + "error": f"Command timed out after {timeout} seconds", } + ) try: + stdout_thread = threading.Thread(target=read_stdout, daemon=True) + stderr_thread = threading.Thread(target=read_stderr, daemon=True) + + stdout_thread.start() + stderr_thread.start() + + while process.poll() is None: + current_time = time.time() + + if current_time - start_time > ABSOLUTE_TIMEOUT_SECONDS: + error_msg = Text() + error_msg.append( + "Process killed: absolute timeout reached", style="bold red" + ) + emit_error(error_msg, message_group=group_id) + return cleanup_process_and_threads("absolute") + + if current_time - last_output_time[0] > timeout: + error_msg = Text() + error_msg.append( + "Process killed: inactivity timeout reached", style="bold red" + ) + emit_error(error_msg, message_group=group_id) + return cleanup_process_and_threads("inactivity") + + time.sleep(0.1) + + if stdout_thread: + stdout_thread.join(timeout=5) + if stderr_thread: + stderr_thread.join(timeout=5) + + exit_code = process.returncode + execution_time = time.time() - start_time + + try: + if process.stdout and not process.stdout.closed: + process.stdout.close() + if process.stderr and not process.stderr.closed: + process.stderr.close() + if process.stdin and not process.stdin.closed: + process.stdin.close() + except (OSError, ValueError): + pass + + _unregister_process(process) + + if exit_code != 0: + emit_error( + f"Command failed with exit code {exit_code}", message_group=group_id + ) + emit_info(f"Took {execution_time:.2f}s", message_group=group_id) + time.sleep(1) + # Apply line length limits to stdout/stderr before returning + truncated_stdout = [_truncate_line(line) for line in stdout_lines[-256:]] + truncated_stderr = [_truncate_line(line) for line in stderr_lines[-256:]] + + return ShellCommandOutput( + success=False, + command=command, + error="""The process didn't exit cleanly! 
If the user_interrupted flag is true, + please stop all execution and ask the user for clarification!""", + stdout="\n".join(truncated_stdout), + stderr="\n".join(truncated_stderr), + exit_code=exit_code, + execution_time=execution_time, + timeout=False, + user_interrupted=process.pid in _USER_KILLED_PROCESSES, + ) + # Apply line length limits to stdout/stderr before returning + truncated_stdout = [_truncate_line(line) for line in stdout_lines[-256:]] + truncated_stderr = [_truncate_line(line) for line in stderr_lines[-256:]] + + return ShellCommandOutput( + success=exit_code == 0, + command=command, + stdout="\n".join(truncated_stdout), + stderr="\n".join(truncated_stderr), + exit_code=exit_code, + execution_time=execution_time, + timeout=False, + ) + + except Exception as e: + return ShellCommandOutput( + success=False, + command=command, + error=f"Error during streaming execution: {str(e)}", + stdout="\n".join(stdout_lines[-1000:]), + stderr="\n".join(stderr_lines[-1000:]), + exit_code=-1, + timeout=False, + ) + + +def run_shell_command( + context: RunContext, command: str, cwd: str = None, timeout: int = 60 +) -> ShellCommandOutput: + command_displayed = False + + # Generate unique group_id for this command execution + group_id = generate_group_id("shell_command", command) + + if not command or not command.strip(): + emit_error("Command cannot be empty", message_group=group_id) + return ShellCommandOutput( + **{"success": False, "error": "Command cannot be empty"} + ) + + emit_info( + f"\n[bold white on blue] SHELL COMMAND [/bold white on blue] 📂 [bold green]$ {command}[/bold green]", + message_group=group_id, + ) + + from code_puppy.config import get_yolo_mode + + yolo_mode = get_yolo_mode() + + confirmation_lock_acquired = False + + # Only ask for confirmation if we're in an interactive TTY and not in yolo mode. + if not yolo_mode and sys.stdin.isatty(): + confirmation_lock_acquired = _CONFIRMATION_LOCK.acquire(blocking=False) + if not confirmation_lock_acquired: + return ShellCommandOutput( + success=False, + command=command, + error="Another command is currently awaiting confirmation", + ) + + command_displayed = True + + if cwd: + emit_info(f"[dim] Working directory: {cwd} [/dim]", message_group=group_id) + + # Set the flag to indicate we're awaiting user input + set_awaiting_user_input(True) + + time.sleep(0.2) + sys.stdout.write("Are you sure you want to run this command? (y(es)/n(o))\n") + sys.stdout.flush() + + try: + user_input = input() + confirmed = user_input.strip().lower() in {"yes", "y"} + except (KeyboardInterrupt, EOFError): + emit_warning("\n Cancelled by user") + confirmed = False + finally: + # Clear the flag regardless of the outcome + set_awaiting_user_input(False) + if confirmation_lock_acquired: + _CONFIRMATION_LOCK.release() + + if not confirmed: + result = ShellCommandOutput( + success=False, command=command, error="User rejected the command!" 
+ ) + return result + else: start_time = time.time() - # Execute the command with timeout + try: + creationflags = 0 + preexec_fn = None + if sys.platform.startswith("win"): + try: + creationflags = subprocess.CREATE_NEW_PROCESS_GROUP # type: ignore[attr-defined] + except Exception: + creationflags = 0 + else: + preexec_fn = os.setsid if hasattr(os, "setsid") else None + process = subprocess.Popen( command, shell=True, @@ -70,143 +458,173 @@ def run_shell_command( stderr=subprocess.PIPE, text=True, cwd=cwd, + bufsize=1, + universal_newlines=True, + preexec_fn=preexec_fn, + creationflags=creationflags, ) - + _register_process(process) try: - stdout, stderr = process.communicate(timeout=timeout) - exit_code = process.returncode - execution_time = time.time() - start_time - - # Display command output - if stdout.strip(): - console.print("[bold white]STDOUT:[/bold white]") - console.print( - Syntax( - stdout.strip(), - "bash", - theme="monokai", - background_color="default", - ) - ) + return run_shell_command_streaming( + process, timeout=timeout, command=command, group_id=group_id + ) + finally: + # Ensure unregistration in case streaming returned early or raised + _unregister_process(process) + except Exception as e: + emit_error(traceback.format_exc(), message_group=group_id) + if "stdout" not in locals(): + stdout = None + if "stderr" not in locals(): + stderr = None - if stderr.strip(): - console.print("[bold yellow]STDERR:[/bold yellow]") - console.print( - Syntax( - stderr.strip(), - "bash", - theme="monokai", - background_color="default", - ) - ) + # Apply line length limits to stdout/stderr if they exist + truncated_stdout = None + if stdout: + stdout_lines = stdout.split("\n") + truncated_stdout = "\n".join( + [_truncate_line(line) for line in stdout_lines[-256:]] + ) - # Show execution summary - if exit_code == 0: - console.print( - f"[bold green]✓ Command completed successfully[/bold green] [dim](took {execution_time:.2f}s)[/dim]" - ) - else: - console.print( - f"[bold red]✗ Command failed with exit code {exit_code}[/bold red] [dim](took {execution_time:.2f}s)[/dim]" - ) + truncated_stderr = None + if stderr: + stderr_lines = stderr.split("\n") + truncated_stderr = "\n".join( + [_truncate_line(line) for line in stderr_lines[-256:]] + ) - console.print("[dim]" + "-" * 60 + "[/dim]\n") + return ShellCommandOutput( + success=False, + command=command, + error=f"Error executing command {str(e)}", + stdout=truncated_stdout, + stderr=truncated_stderr, + exit_code=-1, + timeout=False, + ) - return { - "success": exit_code == 0, - "command": command, - "stdout": stdout, - "stderr": stderr, - "exit_code": exit_code, - "execution_time": execution_time, - "timeout": False, - } - except subprocess.TimeoutExpired: - # Kill the process if it times out - process.kill() - stdout, stderr = process.communicate() - execution_time = time.time() - start_time - - # Display timeout information - if stdout.strip(): - console.print( - "[bold white]STDOUT (incomplete due to timeout):[/bold white]" - ) - console.print( - Syntax( - stdout.strip(), - "bash", - theme="monokai", - background_color="default", - ) - ) - if stderr.strip(): - console.print("[bold yellow]STDERR:[/bold yellow]") - console.print( - Syntax( - stderr.strip(), - "bash", - theme="monokai", - background_color="default", - ) - ) +class ReasoningOutput(BaseModel): + success: bool = True - console.print( - f"[bold red]⏱ Command timed out after {timeout} seconds[/bold red] [dim](ran for {execution_time:.2f}s)[/dim]" - ) - console.print("[dim]" + 
"-" * 60 + "[/dim]\n") - return { - "success": False, - "command": command, - "stdout": stdout, - "stderr": stderr, - "exit_code": None, # No exit code since the process was killed - "execution_time": execution_time, - "timeout": True, - "error": f"Command timed out after {timeout} seconds", - } - except Exception as e: - # Display error information - console.print_exception(show_locals=True) - console.print("[dim]" + "-" * 60 + "[/dim]\n") - - return { - "success": False, - "command": command, - "error": f"Error executing command: {str(e)}", - "stdout": "", - "stderr": "", - "exit_code": -1, - "timeout": False, - } - - -@code_generation_agent.tool def share_your_reasoning( - context: RunContext, reasoning: str, next_steps: str = None -) -> Dict[str, Any]: - """Share the agent's current reasoning and planned next steps with the user. + context: RunContext, reasoning: str, next_steps: str | None = None +) -> ReasoningOutput: + # Generate unique group_id for this reasoning session + group_id = generate_group_id( + "agent_reasoning", reasoning[:50] + ) # Use first 50 chars for context + + if not is_tui_mode(): + emit_divider(message_group=group_id) + emit_info( + "\n[bold white on purple] AGENT REASONING [/bold white on purple]", + message_group=group_id, + ) + emit_info("[bold cyan]Current reasoning:[/bold cyan]", message_group=group_id) + emit_system_message(Markdown(reasoning), message_group=group_id) + if next_steps is not None and next_steps.strip(): + emit_info( + "\n[bold cyan]Planned next steps:[/bold cyan]", message_group=group_id + ) + emit_system_message(Markdown(next_steps), message_group=group_id) + emit_info("[dim]" + "-" * 60 + "[/dim]\n", message_group=group_id) + return ReasoningOutput(**{"success": True}) - Args: - reasoning: The agent's current reasoning or thought process. - next_steps: Optional description of what the agent plans to do next. - Returns: - A dictionary with the reasoning information. - """ - console.print("\n[bold white on purple] AGENT REASONING [/bold white on purple]") +def register_agent_run_shell_command(agent): + """Register only the agent_run_shell_command tool.""" + + @agent.tool + def agent_run_shell_command( + context: RunContext, command: str = "", cwd: str = None, timeout: int = 60 + ) -> ShellCommandOutput: + """Execute a shell command with comprehensive monitoring and safety features. + + This tool provides robust shell command execution with streaming output, + timeout handling, user confirmation (when not in yolo mode), and proper + process lifecycle management. Commands are executed in a controlled + environment with cross-platform process group handling. + + Args: + command: The shell command to execute. Cannot be empty or whitespace-only. + cwd: Working directory for command execution. If None, + uses the current working directory. Defaults to None. + timeout: Inactivity timeout in seconds. If no output is + produced for this duration, the process will be terminated. + Defaults to 60 seconds. 
+ + Returns: + ShellCommandOutput: A structured response containing: + - success (bool): True if command executed successfully (exit code 0) + - command (str | None): The executed command string + - error (str | None): Error message if execution failed + - stdout (str | None): Standard output from the command (last 256 lines) + - stderr (str | None): Standard error from the command (last 256 lines) + - exit_code (int | None): Process exit code + - execution_time (float | None): Total execution time in seconds + - timeout (bool | None): True if command was terminated due to timeout + - user_interrupted (bool | None): True if user killed the process + + Examples: + >>> # Basic command execution + >>> result = agent_run_shell_command(ctx, "ls -la") + >>> print(result.stdout) + + >>> # Command with working directory + >>> result = agent_run_shell_command(ctx, "npm test", "/path/to/project") + >>> if result.success: + ... print("Tests passed!") + + >>> # Command with custom timeout + >>> result = agent_run_shell_command(ctx, "long_running_command", timeout=300) + >>> if result.timeout: + ... print("Command timed out") + + Warning: + This tool can execute arbitrary shell commands. Exercise caution when + running untrusted commands, especially those that modify system state. + """ + return run_shell_command(context, command, cwd, timeout) + + +def register_agent_share_your_reasoning(agent): + """Register only the agent_share_your_reasoning tool.""" + + @agent.tool + def agent_share_your_reasoning( + context: RunContext, reasoning: str = "", next_steps: str | None = None + ) -> ReasoningOutput: + """Share the agent's current reasoning and planned next steps with the user. + + This tool provides transparency into the agent's decision-making process + by displaying the current reasoning and upcoming actions in a formatted, + user-friendly manner. It's essential for building trust and understanding + between the agent and user. - # Display the reasoning with markdown formatting - console.print("[bold cyan]Current reasoning:[/bold cyan]") - console.print(Markdown(reasoning)) + Args: + reasoning: The agent's current thought process, analysis, or + reasoning for the current situation. This should be clear, + comprehensive, and explain the 'why' behind decisions. + next_steps: Planned upcoming actions or steps + the agent intends to take. Can be None if no specific next steps + are determined. Defaults to None. - # Display next steps if provided - if next_steps and next_steps.strip(): - console.print("\n[bold cyan]Planned next steps:[/bold cyan]") - console.print(Markdown(next_steps)) + Returns: + ReasoningOutput: A simple response object containing: + - success (bool): Always True, indicating the reasoning was shared - console.print("[dim]" + "-" * 60 + "[/dim]\n") + Examples: + >>> reasoning = "I need to analyze the codebase structure first" + >>> next_steps = "First, I'll list the directory contents, then read key files" + >>> result = agent_share_your_reasoning(ctx, reasoning, next_steps) - return {"success": True, "reasoning": reasoning, "next_steps": next_steps} + Best Practice: + Use this tool frequently to maintain transparency. 
Call it: + - Before starting complex operations + - When changing strategy or approach + - To explain why certain decisions are being made + - When encountering unexpected situations + """ + return share_your_reasoning(context, reasoning, next_steps) diff --git a/code_puppy/tools/common.py b/code_puppy/tools/common.py index a9463afd..4c0438c3 100644 --- a/code_puppy/tools/common.py +++ b/code_puppy/tools/common.py @@ -1,3 +1,443 @@ +import fnmatch +import hashlib +import os +import time +from pathlib import Path +from typing import Optional, Tuple + +from rapidfuzz.distance import JaroWinkler from rich.console import Console -console = Console() +# Import our queue-based console system +try: + from code_puppy.messaging import get_queue_console + + # Use queue console by default, but allow fallback + NO_COLOR = bool(int(os.environ.get("CODE_PUPPY_NO_COLOR", "0"))) + _rich_console = Console(no_color=NO_COLOR) + console = get_queue_console() + # Set the fallback console for compatibility + console.fallback_console = _rich_console +except ImportError: + # Fallback to regular Rich console if messaging system not available + NO_COLOR = bool(int(os.environ.get("CODE_PUPPY_NO_COLOR", "0"))) + console = Console(no_color=NO_COLOR) + + +# ------------------- +# Shared ignore patterns/helpers +# ------------------- +IGNORE_PATTERNS = [ + # Version control + "**/.git/**", + "**/.git", + ".git/**", + ".git", + "**/.svn/**", + "**/.hg/**", + "**/.bzr/**", + # Node.js / JavaScript / TypeScript + "**/node_modules/**", + "**/node_modules/**/*.js", + "node_modules/**", + "node_modules", + "**/npm-debug.log*", + "**/yarn-debug.log*", + "**/yarn-error.log*", + "**/pnpm-debug.log*", + "**/.npm/**", + "**/.yarn/**", + "**/.pnpm-store/**", + "**/coverage/**", + "**/.nyc_output/**", + "**/dist/**", + "**/dist", + "**/build/**", + "**/build", + "**/.next/**", + "**/.nuxt/**", + "**/out/**", + "**/.cache/**", + "**/.parcel-cache/**", + "**/.vite/**", + "**/storybook-static/**", + "**/*.tsbuildinfo/**", + # Python + "**/__pycache__/**", + "**/__pycache__", + "__pycache__/**", + "__pycache__", + "**/*.pyc", + "**/*.pyo", + "**/*.pyd", + "**/.pytest_cache/**", + "**/.mypy_cache/**", + "**/.coverage", + "**/htmlcov/**", + "**/.tox/**", + "**/.nox/**", + "**/site-packages/**", + "**/.venv/**", + "**/.venv", + "**/venv/**", + "**/venv", + "**/env/**", + "**/ENV/**", + "**/.env", + "**/pip-wheel-metadata/**", + "**/*.egg-info/**", + "**/dist/**", + "**/wheels/**", + "**/pytest-reports/**", + # Java (Maven, Gradle, SBT) + "**/target/**", + "**/target", + "**/build/**", + "**/build", + "**/.gradle/**", + "**/gradle-app.setting", + "**/*.class", + "**/*.jar", + "**/*.war", + "**/*.ear", + "**/*.nar", + "**/hs_err_pid*", + "**/.classpath", + "**/.project", + "**/.settings/**", + "**/bin/**", + "**/project/target/**", + "**/project/project/**", + # Go + "**/vendor/**", + "**/*.exe", + "**/*.exe~", + "**/*.dll", + "**/*.so", + "**/*.dylib", + "**/*.test", + "**/*.out", + "**/go.work", + "**/go.work.sum", + # Rust + "**/target/**", + "**/Cargo.lock", + "**/*.pdb", + # Ruby + "**/vendor/**", + "**/.bundle/**", + "**/Gemfile.lock", + "**/*.gem", + "**/.rvm/**", + "**/.rbenv/**", + "**/coverage/**", + "**/.yardoc/**", + "**/doc/**", + "**/rdoc/**", + "**/.sass-cache/**", + "**/.jekyll-cache/**", + "**/_site/**", + # PHP + "**/vendor/**", + "**/composer.lock", + "**/.phpunit.result.cache", + "**/storage/logs/**", + "**/storage/framework/cache/**", + "**/storage/framework/sessions/**", + "**/storage/framework/testing/**", + 
"**/storage/framework/views/**", + "**/bootstrap/cache/**", + # .NET / C# + "**/bin/**", + "**/obj/**", + "**/packages/**", + "**/*.cache", + "**/*.dll", + "**/*.exe", + "**/*.pdb", + "**/*.user", + "**/*.suo", + "**/.vs/**", + "**/TestResults/**", + "**/BenchmarkDotNet.Artifacts/**", + # C/C++ + "**/*.o", + "**/*.obj", + "**/*.so", + "**/*.dll", + "**/*.a", + "**/*.lib", + "**/*.dylib", + "**/*.exe", + "**/CMakeFiles/**", + "**/CMakeCache.txt", + "**/cmake_install.cmake", + "**/Makefile", + "**/compile_commands.json", + "**/.deps/**", + "**/.libs/**", + "**/autom4te.cache/**", + # Perl + "**/blib/**", + "**/_build/**", + "**/Build", + "**/Build.bat", + "**/*.tmp", + "**/*.bak", + "**/*.old", + "**/Makefile.old", + "**/MANIFEST.bak", + "**/META.yml", + "**/META.json", + "**/MYMETA.*", + "**/.prove", + # Scala + "**/target/**", + "**/project/target/**", + "**/project/project/**", + "**/.bloop/**", + "**/.metals/**", + "**/.ammonite/**", + "**/*.class", + # Elixir + "**/_build/**", + "**/deps/**", + "**/*.beam", + "**/.fetch", + "**/erl_crash.dump", + "**/*.ez", + "**/doc/**", + "**/.elixir_ls/**", + # Swift + "**/.build/**", + "**/Packages/**", + "**/*.xcodeproj/**", + "**/*.xcworkspace/**", + "**/DerivedData/**", + "**/xcuserdata/**", + "**/*.dSYM/**", + # Kotlin + "**/build/**", + "**/.gradle/**", + "**/*.class", + "**/*.jar", + "**/*.kotlin_module", + # Clojure + "**/target/**", + "**/.lein-**", + "**/.nrepl-port", + "**/pom.xml.asc", + "**/*.jar", + "**/*.class", + # Dart/Flutter + "**/.dart_tool/**", + "**/build/**", + "**/.packages", + "**/pubspec.lock", + "**/*.g.dart", + "**/*.freezed.dart", + "**/*.gr.dart", + # Haskell + "**/dist/**", + "**/dist-newstyle/**", + "**/.stack-work/**", + "**/*.hi", + "**/*.o", + "**/*.prof", + "**/*.aux", + "**/*.hp", + "**/*.eventlog", + "**/*.tix", + # Erlang + "**/ebin/**", + "**/rel/**", + "**/deps/**", + "**/*.beam", + "**/*.boot", + "**/*.plt", + "**/erl_crash.dump", + # Common cache and temp directories + "**/.cache/**", + "**/cache/**", + "**/tmp/**", + "**/temp/**", + "**/.tmp/**", + "**/.temp/**", + "**/logs/**", + "**/*.log", + "**/*.log.*", + # IDE and editor files + "**/.idea/**", + "**/.idea", + "**/.vscode/**", + "**/.vscode", + "**/*.swp", + "**/*.swo", + "**/*~", + "**/.#*", + "**/#*#", + "**/.emacs.d/auto-save-list/**", + "**/.vim/**", + "**/.netrwhist", + "**/Session.vim", + "**/.sublime-project", + "**/.sublime-workspace", + # OS-specific files + "**/.DS_Store", + ".DS_Store", + "**/Thumbs.db", + "**/Desktop.ini", + "**/.directory", + "**/*.lnk", + # Common artifacts + "**/*.orig", + "**/*.rej", + "**/*.patch", + "**/*.diff", + "**/.*.orig", + "**/.*.rej", + # Backup files + "**/*~", + "**/*.bak", + "**/*.backup", + "**/*.old", + "**/*.save", + # Hidden files (but be careful with this one) + "**/.*", # Commented out as it might be too aggressive + # Binary image formats + "**/*.png", + "**/*.jpg", + "**/*.jpeg", + "**/*.gif", + "**/*.bmp", + "**/*.tiff", + "**/*.tif", + "**/*.webp", + "**/*.ico", + "**/*.svg", + # Binary document formats + "**/*.pdf", + "**/*.doc", + "**/*.docx", + "**/*.xls", + "**/*.xlsx", + "**/*.ppt", + "**/*.pptx", + # Archive formats + "**/*.zip", + "**/*.tar", + "**/*.gz", + "**/*.bz2", + "**/*.xz", + "**/*.rar", + "**/*.7z", + # Media files + "**/*.mp3", + "**/*.mp4", + "**/*.avi", + "**/*.mov", + "**/*.wmv", + "**/*.flv", + "**/*.wav", + "**/*.ogg", + # Font files + "**/*.ttf", + "**/*.otf", + "**/*.woff", + "**/*.woff2", + "**/*.eot", + # Other binary formats + "**/*.bin", + "**/*.dat", + "**/*.db", + 
"**/*.sqlite", + "**/*.sqlite3", +] + + +def should_ignore_path(path: str) -> bool: + """Return True if *path* matches any pattern in IGNORE_PATTERNS.""" + # Convert path to Path object for better pattern matching + path_obj = Path(path) + + for pattern in IGNORE_PATTERNS: + # Try pathlib's match method which handles ** patterns properly + try: + if path_obj.match(pattern): + return True + except ValueError: + # If pathlib can't handle the pattern, fall back to fnmatch + if fnmatch.fnmatch(path, pattern): + return True + + # Additional check: if pattern contains **, try matching against + # different parts of the path to handle edge cases + if "**" in pattern: + # Convert pattern to handle different path representations + simplified_pattern = pattern.replace("**/", "").replace("/**", "") + + # Check if any part of the path matches the simplified pattern + path_parts = path_obj.parts + for i in range(len(path_parts)): + subpath = Path(*path_parts[i:]) + if fnmatch.fnmatch(str(subpath), simplified_pattern): + return True + # Also check individual parts + if fnmatch.fnmatch(path_parts[i], simplified_pattern): + return True + + return False + + +def _find_best_window( + haystack_lines: list[str], + needle: str, +) -> Tuple[Optional[Tuple[int, int]], float]: + """ + Return (start, end) indices of the window with the highest + Jaro-Winkler similarity to `needle`, along with that score. + If nothing clears JW_THRESHOLD, return (None, score). + """ + needle = needle.rstrip("\n") + needle_lines = needle.splitlines() + win_size = len(needle_lines) + best_score = 0.0 + best_span: Optional[Tuple[int, int]] = None + best_window = "" + # Pre-join the needle once; join windows on the fly + for i in range(len(haystack_lines) - win_size + 1): + window = "\n".join(haystack_lines[i : i + win_size]) + score = JaroWinkler.normalized_similarity(window, needle) + if score > best_score: + best_score = score + best_span = (i, i + win_size) + best_window = window + + console.log(f"Best span: {best_span}") + console.log(f"Best window: {best_window}") + console.log(f"Best score: {best_score}") + return best_span, best_score + + +def generate_group_id(tool_name: str, extra_context: str = "") -> str: + """Generate a unique group_id for tool output grouping. + + Args: + tool_name: Name of the tool (e.g., 'list_files', 'edit_file') + extra_context: Optional extra context to make group_id more unique + + Returns: + A string in format: tool_name_hash + """ + # Create a unique identifier using timestamp, context, and a random component + import random + + timestamp = str(int(time.time() * 1000000)) # microseconds for more uniqueness + random_component = random.randint(1000, 9999) # Add randomness + context_string = f"{tool_name}_{timestamp}_{random_component}_{extra_context}" + + # Generate a short hash + hash_obj = hashlib.md5(context_string.encode()) + short_hash = hash_obj.hexdigest()[:8] + + return f"{tool_name}_{short_hash}" diff --git a/code_puppy/tools/file_modifications.py b/code_puppy/tools/file_modifications.py index 5dc73bae..53285346 100644 --- a/code_puppy/tools/file_modifications.py +++ b/code_puppy/tools/file_modifications.py @@ -1,277 +1,627 @@ -# file_modifications.py -import os -import difflib -from code_puppy.tools.common import console -from typing import Dict, Any -from code_puppy.agent import code_generation_agent -from pydantic_ai import RunContext - +"""Robust, always-diff-logging file-modification helpers + agent tools. +Key guarantees +-------------- +1. 
**A diff is printed _inline_ on every path** (success, no-op, or error) – no decorator magic. +2. **Full traceback logging** for unexpected errors via `_log_error`. +3. Helper functions stay print-free and return a `diff` key, while agent-tool wrappers handle + all console output. +""" +from __future__ import annotations -@code_generation_agent.tool -def modify_file( - context: RunContext, - file_path: str, - proposed_changes: str, - replace_content: str, - overwrite_entire_file: bool = False, -) -> Dict[str, Any]: - """Modify a file with proposed changes, generating a diff and applying the changes. +import difflib +import json +import os +import traceback +from typing import Any, Dict, List, Union - Args: - file_path: Path of the file to modify. - proposed_changes: The new content to replace the targeted section or entire file content. - replace_content: The content to replace. If blank or not present in the file, the whole file will be replaced ONLY if overwrite_entire_file is True. - overwrite_entire_file: Explicitly allow replacing the entire file content (default False). You MUST supply True to allow this. +import json_repair +from pydantic import BaseModel +from pydantic_ai import RunContext - Returns: - A dictionary with the operation result, including success status, message, and diff. - """ +from code_puppy.messaging import emit_error, emit_info, emit_warning +from code_puppy.tools.common import _find_best_window, generate_group_id + + +class DeleteSnippetPayload(BaseModel): + file_path: str + delete_snippet: str + + +class Replacement(BaseModel): + old_str: str + new_str: str + + +class ReplacementsPayload(BaseModel): + file_path: str + replacements: List[Replacement] + + +class ContentPayload(BaseModel): + file_path: str + content: str + overwrite: bool = False + + +EditFilePayload = Union[DeleteSnippetPayload, ReplacementsPayload, ContentPayload] + + +def _print_diff(diff_text: str, message_group: str = None) -> None: + """Pretty-print *diff_text* with colour-coding (always runs).""" + + emit_info( + "[bold cyan]\n── DIFF ────────────────────────────────────────────────[/bold cyan]", + message_group=message_group, + ) + if diff_text and diff_text.strip(): + for line in diff_text.splitlines(): + # Git-style diff coloring using markup strings for TUI compatibility + if line.startswith("+") and not line.startswith("+++"): + # Addition line - use markup string instead of Rich Text + emit_info( + f"[bold green]{line}[/bold green]", + highlight=False, + message_group=message_group, + ) + elif line.startswith("-") and not line.startswith("---"): + # Removal line - use markup string instead of Rich Text + emit_info( + f"[bold red]{line}[/bold red]", + highlight=False, + message_group=message_group, + ) + elif line.startswith("@@"): + # Hunk info - use markup string instead of Rich Text + emit_info( + f"[bold cyan]{line}[/bold cyan]", + highlight=False, + message_group=message_group, + ) + elif line.startswith("+++") or line.startswith("---"): + # Filename lines in diff - use markup string instead of Rich Text + emit_info( + f"[dim white]{line}[/dim white]", + highlight=False, + message_group=message_group, + ) + else: + # Context lines - no special formatting + emit_info(line, highlight=False, message_group=message_group) + else: + emit_info("[dim]-- no diff available --[/dim]", message_group=message_group) + emit_info( + "[bold cyan]───────────────────────────────────────────────────────[/bold cyan]", + message_group=message_group, + ) + + +def _log_error( + msg: str, exc: Exception | 
None = None, message_group: str = None +) -> None: + emit_error(f"{msg}", message_group=message_group) + if exc is not None: + emit_error(traceback.format_exc(), highlight=False, message_group=message_group) + + +def _delete_snippet_from_file( + context: RunContext | None, file_path: str, snippet: str, message_group: str = None +) -> Dict[str, Any]: file_path = os.path.abspath(file_path) - - console.print("\n[bold white on yellow] FILE MODIFICATION [/bold white on yellow]") - console.print(f"[bold yellow]Modifying:[/bold yellow] {file_path}") - + diff_text = "" try: - # Check if the file exists - if not os.path.exists(file_path): - console.print( - f"[bold red]Error:[/bold red] File '{file_path}' does not exist" - ) - return {"error": f"File '{file_path}' does not exist"} - - if not os.path.isfile(file_path): - console.print(f"[bold red]Error:[/bold red] '{file_path}' is not a file") - return {"error": f"'{file_path}' is not a file."} - + if not os.path.exists(file_path) or not os.path.isfile(file_path): + return {"error": f"File '{file_path}' does not exist.", "diff": diff_text} with open(file_path, "r", encoding="utf-8") as f: - current_content = f.read() - - # Decide how to modify - targeted_replacement = bool(replace_content) and ( - replace_content in current_content - ) - replace_content_provided = bool(replace_content) - - if targeted_replacement: - modified_content = current_content.replace( - replace_content, proposed_changes - ) - console.print(f"[cyan]Replacing targeted content in '{file_path}'[/cyan]") - elif not targeted_replacement: - # Only allow full replacement if explicitly authorized - if overwrite_entire_file: - modified_content = proposed_changes - if replace_content_provided: - console.print( - "[bold yellow]Target content not found—replacing the entire file by explicit request (overwrite_entire_file=True).[/bold yellow]" - ) - else: - console.print( - "[bold yellow]No target provided—replacing the entire file by explicit request (overwrite_entire_file=True).[/bold yellow]" - ) - else: - if not replace_content_provided: - msg = "Refusing to replace the entire file: No replace_content provided and overwrite_entire_file=False." - else: - msg = "Refusing to replace the entire file: Target content not found in file and overwrite_entire_file=False." 
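For reference, the `diff` key these helpers return is an ordinary `difflib.unified_diff` string built from the before/after content. A standalone example of the same keepends/fromfile pattern (toy data, not project code):

```python
# Standalone illustration of how a unified diff string is assembled for a
# snippet deletion; the helpers above use the same keepends/fromfile approach.
import difflib

original = "keep me\ndelete me\nkeep me too\n"
modified = original.replace("delete me\n", "")

diff_text = "".join(
    difflib.unified_diff(
        original.splitlines(keepends=True),
        modified.splitlines(keepends=True),
        fromfile="a/example.txt",
        tofile="b/example.txt",
        n=3,
    )
)
print(diff_text)  # "-delete me" appears as a removal hunk
```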
- console.print(f"[bold red]Error:[/bold red] {msg}") - return { - "success": False, - "path": file_path, - "message": msg, - "diff": "", - "changed": False, - } - - # Generate a diff for display - diff_lines = list( + original = f.read() + if snippet not in original: + return { + "error": f"Snippet not found in file '{file_path}'.", + "diff": diff_text, + } + modified = original.replace(snippet, "") + diff_text = "".join( difflib.unified_diff( - current_content.splitlines(keepends=True), - modified_content.splitlines(keepends=True), + original.splitlines(keepends=True), + modified.splitlines(keepends=True), fromfile=f"a/{os.path.basename(file_path)}", tofile=f"b/{os.path.basename(file_path)}", n=3, ) ) - diff_text = "".join(diff_lines) - console.print("[bold cyan]Changes to be applied:[/bold cyan]") - if diff_text.strip(): - formatted_diff = "" - for line in diff_lines: - if line.startswith("+") and not line.startswith("+++"): - formatted_diff += f"[bold green]{line}[/bold green]" - elif line.startswith("-") and not line.startswith("---"): - formatted_diff += f"[bold red]{line}[/bold red]" - elif line.startswith("@"): - formatted_diff += f"[bold cyan]{line}[/bold cyan]" - else: - formatted_diff += line - console.print(formatted_diff) - else: - console.print("[dim]No changes detected - file content is identical[/dim]") - return { - "success": False, - "path": file_path, - "message": "No changes to apply.", - "diff": diff_text, - "changed": False, - } - - # Write the modified content to the file with open(file_path, "w", encoding="utf-8") as f: - f.write(modified_content) - + f.write(modified) return { "success": True, "path": file_path, - "message": f"File modified at '{file_path}'", - "diff": diff_text, + "message": "Snippet deleted from file.", "changed": True, + "diff": diff_text, } - except Exception as e: - return {"error": f"Error modifying file '{file_path}': {str(e)}"} + except Exception as exc: + return {"error": str(exc), "diff": diff_text} -@code_generation_agent.tool -def delete_snippet_from_file( - context: RunContext, file_path: str, snippet: str +def _replace_in_file( + context: RunContext | None, + path: str, + replacements: List[Dict[str, str]], + message_group: str = None, ) -> Dict[str, Any]: - console.log(f"🗑️ Deleting snippet from file [bold red]{file_path}[/bold red]") - """Delete a snippet from a file at the given file path. - - Args: - file_path: Path to the file to delete. - snippet: The snippet to delete. - - Returns: - A dictionary with status and message about the operation. - """ - file_path = os.path.abspath(file_path) + """Robust replacement engine with explicit edge‑case reporting.""" + file_path = os.path.abspath(path) - console.print("\n[bold white on red] SNIPPET DELETION [/bold white on red]") - console.print(f"[bold yellow]From file:[/bold yellow] {file_path}") - - try: - # Check if the file exists - if not os.path.exists(file_path): - console.print( - f"[bold red]Error:[/bold red] File '{file_path}' does not exist" - ) - return {"error": f"File '{file_path}' does not exist."} + with open(file_path, "r", encoding="utf-8") as f: + original = f.read() - # Check if it's a file (not a directory) - if not os.path.isfile(file_path): - console.print(f"[bold red]Error:[/bold red] '{file_path}' is not a file") - return {"error": f"'{file_path}' is not a file. 
Use rmdir for directories."} + modified = original + for rep in replacements: + old_snippet = rep.get("old_str", "") + new_snippet = rep.get("new_str", "") - # Read the file content - with open(file_path, "r", encoding="utf-8") as f: - content = f.read() + if old_snippet and old_snippet in modified: + modified = modified.replace(old_snippet, new_snippet) + continue - # Check if the snippet exists in the file - if snippet not in content: - console.print( - f"[bold red]Error:[/bold red] Snippet not found in file '{file_path}'" - ) - return {"error": f"Snippet not found in file '{file_path}'."} + orig_lines = modified.splitlines() + loc, score = _find_best_window(orig_lines, old_snippet) - # Remove the snippet from the file content - modified_content = content.replace(snippet, "") + if score < 0.95 or loc is None: + return { + "error": "No suitable match in file (JW < 0.95)", + "jw_score": score, + "received": old_snippet, + "diff": "", + } - # Generate a diff - diff_lines = list( - difflib.unified_diff( - content.splitlines(keepends=True), - modified_content.splitlines(keepends=True), - fromfile=f"a/{os.path.basename(file_path)}", - tofile=f"b/{os.path.basename(file_path)}", - n=3, # Context lines - ) + start, end = loc + modified = ( + "\n".join(orig_lines[:start]) + + "\n" + + new_snippet.rstrip("\n") + + "\n" + + "\n".join(orig_lines[end:]) ) - diff_text = "".join(diff_lines) + if modified == original: + emit_warning( + "No changes to apply – proposed content is identical.", + message_group=message_group, + ) + return { + "success": False, + "path": file_path, + "message": "No changes to apply.", + "changed": False, + "diff": "", + } - # Display the diff - console.print("[bold cyan]Changes to be applied:[/bold cyan]") - - if diff_text.strip(): - # Format the diff for display with colorization - formatted_diff = "" - for line in diff_lines: - if line.startswith("+") and not line.startswith("+++"): - formatted_diff += f"[bold green]{line}[/bold green]" - elif line.startswith("-") and not line.startswith("---"): - formatted_diff += f"[bold red]{line}[/bold red]" - elif line.startswith("@"): - formatted_diff += f"[bold cyan]{line}[/bold cyan]" - else: - formatted_diff += line + diff_text = "".join( + difflib.unified_diff( + original.splitlines(keepends=True), + modified.splitlines(keepends=True), + fromfile=f"a/{os.path.basename(file_path)}", + tofile=f"b/{os.path.basename(file_path)}", + n=3, + ) + ) + with open(file_path, "w", encoding="utf-8") as f: + f.write(modified) + return { + "success": True, + "path": file_path, + "message": "Replacements applied.", + "changed": True, + "diff": diff_text, + } + + +def _write_to_file( + context: RunContext | None, + path: str, + content: str, + overwrite: bool = False, + message_group: str = None, +) -> Dict[str, Any]: + file_path = os.path.abspath(path) - console.print(formatted_diff) - else: - console.print("[dim]No changes detected[/dim]") + try: + exists = os.path.exists(file_path) + if exists and not overwrite: return { "success": False, "path": file_path, - "message": "No changes needed.", + "message": f"Cowardly refusing to overwrite existing file: {file_path}", + "changed": False, "diff": "", } - # Write the modified content back to the file + diff_lines = difflib.unified_diff( + [] if not exists else [""], + content.splitlines(keepends=True), + fromfile="/dev/null" if not exists else f"a/{os.path.basename(file_path)}", + tofile=f"b/{os.path.basename(file_path)}", + n=3, + ) + diff_text = "".join(diff_lines) + + 
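When an exact `old_str` match is missing, `_replace_in_file` falls back to the sliding-window Jaro-Winkler search from `_find_best_window` and only edits when the best window scores at least 0.95. A toy demonstration of that scoring idea (not the project function itself):

```python
# Toy demonstration of the fuzzy-window fallback: slide a window the size of
# the needle over the haystack lines and keep the highest Jaro-Winkler score.
from rapidfuzz.distance import JaroWinkler

haystack = ["def add(a, b):", "    return a + b", "", "print(add(1, 2))"]
needle = "def add(a, b):\n    return a+b"  # slightly off: missing spaces

needle_lines = needle.splitlines()
win = len(needle_lines)
best_span, best_score = None, 0.0
for i in range(len(haystack) - win + 1):
    window = "\n".join(haystack[i : i + win])
    score = JaroWinkler.normalized_similarity(window, needle)
    if score > best_score:
        best_span, best_score = (i, i + win), score

print(best_span, round(best_score, 3))  # a replacement only proceeds when the score >= 0.95
```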
os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True) with open(file_path, "w", encoding="utf-8") as f: - f.write(modified_content) + f.write(content) + action = "overwritten" if exists else "created" return { "success": True, "path": file_path, - "message": f"Snippet deleted from file '{file_path}'.", + "message": f"File '{file_path}' {action} successfully.", + "changed": True, "diff": diff_text, } - except PermissionError: - return {"error": f"Permission denied to delete '{file_path}'."} - except FileNotFoundError: - # This should be caught by the initial check, but just in case - return {"error": f"File '{file_path}' does not exist."} - except Exception as e: - return {"error": f"Error deleting file '{file_path}': {str(e)}"} - - -@code_generation_agent.tool -def delete_file(context: RunContext, file_path: str) -> Dict[str, Any]: - console.log(f"🗑️ Deleting file [bold red]{file_path}[/bold red]") - """Delete a file at the given file path. - - Args: - file_path: Path to the file to delete. - - Returns: - A dictionary with status and message about the operation. - """ - file_path = os.path.abspath(file_path) - try: - # Check if the file exists - if not os.path.exists(file_path): - return {"error": f"File '{file_path}' does not exist."} + except Exception as exc: + _log_error("Unhandled exception in write_to_file", exc) + return {"error": str(exc), "diff": ""} - # Check if it's a file (not a directory) - if not os.path.isfile(file_path): - return {"error": f"'{file_path}' is not a file. Use rmdir for directories."} - # Attempt to delete the file - os.remove(file_path) +def delete_snippet_from_file( + context: RunContext, file_path: str, snippet: str, message_group: str = None +) -> Dict[str, Any]: + emit_info( + f"🗑️ Deleting snippet from file [bold red]{file_path}[/bold red]", + message_group=message_group, + ) + res = _delete_snippet_from_file( + context, file_path, snippet, message_group=message_group + ) + diff = res.get("diff", "") + if diff: + _print_diff(diff, message_group=message_group) + return res + + +def write_to_file( + context: RunContext, + path: str, + content: str, + overwrite: bool, + message_group: str = None, +) -> Dict[str, Any]: + emit_info( + f"✏️ Writing file [bold blue]{path}[/bold blue]", message_group=message_group + ) + res = _write_to_file( + context, path, content, overwrite=overwrite, message_group=message_group + ) + diff = res.get("diff", "") + if diff: + _print_diff(diff, message_group=message_group) + return res + + +def replace_in_file( + context: RunContext, + path: str, + replacements: List[Dict[str, str]], + message_group: str = None, +) -> Dict[str, Any]: + emit_info( + f"♻️ Replacing text in [bold yellow]{path}[/bold yellow]", + message_group=message_group, + ) + res = _replace_in_file(context, path, replacements, message_group=message_group) + diff = res.get("diff", "") + if diff: + _print_diff(diff, message_group=message_group) + return res + + +def _edit_file( + context: RunContext, payload: EditFilePayload, group_id: str = None +) -> Dict[str, Any]: + """ + High-level implementation of the *edit_file* behaviour. + + This function performs the heavy-lifting after the lightweight agent-exposed wrapper has + validated / coerced the inbound *payload* to one of the Pydantic models declared at the top + of this module. + + Supported payload variants + -------------------------- + • **ContentPayload** – full file write / overwrite. + • **ReplacementsPayload** – targeted in-file replacements. 
+ • **DeleteSnippetPayload** – remove an exact snippet. + + The helper decides which low-level routine to delegate to and ensures the resulting unified + diff is always returned so the caller can pretty-print it for the user. + + Parameters + ---------- + path : str + Path to the target file (relative or absolute) + diff : str + Either: + * Raw file content (for file creation) + * A JSON string with one of the following shapes: + {"content": "full file contents", "overwrite": true} + {"replacements": [ {"old_str": "foo", "new_str": "bar"}, ... ] } + {"delete_snippet": "text to remove"} + + The function auto-detects the payload type and routes to the appropriate internal helper. + """ + # Extract file_path from payload + file_path = os.path.abspath(payload.file_path) + + # Use provided group_id or generate one if not provided + if group_id is None: + group_id = generate_group_id("edit_file", file_path) + emit_info( + "\n[bold white on blue] EDIT FILE [/bold white on blue]", message_group=group_id + ) + try: + if isinstance(payload, DeleteSnippetPayload): + return delete_snippet_from_file( + context, file_path, payload.delete_snippet, message_group=group_id + ) + elif isinstance(payload, ReplacementsPayload): + # Convert Pydantic Replacement models to dict format for legacy compatibility + replacements_dict = [ + {"old_str": rep.old_str, "new_str": rep.new_str} + for rep in payload.replacements + ] + return replace_in_file( + context, file_path, replacements_dict, message_group=group_id + ) + elif isinstance(payload, ContentPayload): + file_exists = os.path.exists(file_path) + if file_exists and not payload.overwrite: + return { + "success": False, + "path": file_path, + "message": f"File '{file_path}' exists. Set 'overwrite': true to replace.", + "changed": False, + } + return write_to_file( + context, + file_path, + payload.content, + payload.overwrite, + message_group=group_id, + ) + else: + return { + "success": False, + "path": file_path, + "message": f"Unknown payload type: {type(payload)}", + "changed": False, + } + except Exception as e: + emit_error( + "Unable to route file modification tool call to sub-tool", + message_group=group_id, + ) + emit_error(str(e), message_group=group_id) return { - "success": True, + "success": False, "path": file_path, - "message": f"File '{file_path}' deleted successfully.", + "message": f"Something went wrong in file editing: {str(e)}", + "changed": False, } - except PermissionError: - return {"error": f"Permission denied to delete '{file_path}'."} - except FileNotFoundError: - # This should be caught by the initial check, but just in case - return {"error": f"File '{file_path}' does not exist."} - except Exception as e: - return {"error": f"Error deleting file '{file_path}': {str(e)}"} + + +def _delete_file( + context: RunContext, file_path: str, message_group: str = None +) -> Dict[str, Any]: + emit_info( + f"🗑️ Deleting file [bold red]{file_path}[/bold red]", message_group=message_group + ) + file_path = os.path.abspath(file_path) + try: + if not os.path.exists(file_path) or not os.path.isfile(file_path): + res = {"error": f"File '{file_path}' does not exist.", "diff": ""} + else: + with open(file_path, "r", encoding="utf-8") as f: + original = f.read() + diff_text = "".join( + difflib.unified_diff( + original.splitlines(keepends=True), + [], + fromfile=f"a/{os.path.basename(file_path)}", + tofile=f"b/{os.path.basename(file_path)}", + n=3, + ) + ) + os.remove(file_path) + res = { + "success": True, + "path": file_path, + "message": f"File 
'{file_path}' deleted successfully.", + "changed": True, + "diff": diff_text, + } + except Exception as exc: + _log_error("Unhandled exception in delete_file", exc) + res = {"error": str(exc), "diff": ""} + _print_diff(res.get("diff", ""), message_group=message_group) + return res + + +def register_edit_file(agent): + """Register only the edit_file tool.""" + + @agent.tool + def edit_file( + context: RunContext, + payload: EditFilePayload | str = "", + ) -> Dict[str, Any]: + """Comprehensive file editing tool supporting multiple modification strategies. + + This is the primary file modification tool that supports three distinct editing + approaches: full content replacement, targeted text replacements, and snippet + deletion. It provides robust diff generation, error handling, and automatic + retry capabilities for reliable file operations. + + Args: + context (RunContext): The PydanticAI runtime context for the agent. + payload: One of three payload types: + + ContentPayload: + - file_path (str): Path to file + - content (str): Full file content to write + - overwrite (bool, optional): Whether to overwrite existing files. + Defaults to False (safe mode). + + ReplacementsPayload: + - file_path (str): Path to file + - replacements (List[Replacement]): List of text replacements where + each Replacement contains: + - old_str (str): Exact text to find and replace + - new_str (str): Replacement text + + DeleteSnippetPayload: + - file_path (str): Path to file + - delete_snippet (str): Exact text snippet to remove from file + + Returns: + Dict[str, Any]: Operation result containing: + - success (bool): True if operation completed successfully + - path (str): Absolute path to the modified file + - message (str): Human-readable description of changes + - changed (bool): True if file content was actually modified + - diff (str, optional): Unified diff showing changes made + - error (str, optional): Error message if operation failed + + Examples: + >>> # Create new file with content + >>> payload = {"file_path": "hello.py", "content": "print('Hello!')", "overwrite": true} + >>> result = edit_file(ctx, payload) + + >>> # Replace text in existing file + >>> payload = { + ... "file_path": "config.py", + ... "replacements": [ + ... {"old_str": "debug = False", "new_str": "debug = True"} + ... ] + ... } + >>> result = edit_file(ctx, payload) + + >>> # Delete snippet from file + >>> payload = { + ... "file_path": "main.py", + ... "delete_snippet": "# TODO: remove this comment" + ... } + >>> result = edit_file(ctx, payload) + + Best Practices: + - Use replacements for targeted changes (most efficient) + - Use content payload only for new files or complete rewrites + - Always check the 'success' field before assuming changes worked + - Review the 'diff' field to understand what changed + - Use delete_snippet for removing specific code blocks + """ + # Handle string payload parsing (for models that send JSON strings) + + parse_error_message = """Examples: + >>> # Create new file with content + >>> payload = {"file_path": "hello.py", "content": "print('Hello!')", "overwrite": true} + >>> result = edit_file(ctx, payload) + + >>> # Replace text in existing file + >>> payload = { + ... "file_path": "config.py", + ... "replacements": [ + ... {"old_str": "debug = False", "new_str": "debug = True"} + ... ] + ... } + >>> result = edit_file(ctx, payload) + + >>> # Delete snippet from file + >>> payload = { + ... "file_path": "main.py", + ... "delete_snippet": "# TODO: remove this comment" + ... 
} + >>> result = edit_file(ctx, payload)""" + + if isinstance(payload, str): + try: + # Fallback for weird models that just can't help but send json strings... + payload = json.loads(json_repair.repair_json(payload)) + if "replacements" in payload: + payload = ReplacementsPayload(**payload) + elif "delete_snippet" in payload: + payload = DeleteSnippetPayload(**payload) + elif "content" in payload: + payload = ContentPayload(**payload) + else: + file_path = "Unknown" + if "file_path" in payload: + file_path = payload["file_path"] + return { + "success": False, + "path": file_path, + "message": f"One of 'content', 'replacements', or 'delete_snippet' must be provided in payload. Refer to the following examples: {parse_error_message}", + "changed": False, + } + except Exception as e: + return { + "success": False, + "path": "Not retrievable in Payload", + "message": f"edit_file call failed: {str(e)} - this means the tool failed to parse your inputs. Refer to the following examples: {parse_error_message}", + "changed": False, + } + + # Call _edit_file which will extract file_path from payload and handle group_id generation + result = _edit_file(context, payload) + if "diff" in result: + del result["diff"] + return result + + +def register_delete_file(agent): + """Register only the delete_file tool.""" + + @agent.tool + def delete_file(context: RunContext, file_path: str = "") -> Dict[str, Any]: + """Safely delete files with comprehensive logging and diff generation. + + This tool provides safe file deletion with automatic diff generation to show + exactly what content was removed. It includes proper error handling and + automatic retry capabilities for reliable operation. + + Args: + context (RunContext): The PydanticAI runtime context for the agent. + file_path (str): Path to the file to delete. Can be relative or absolute. + Must be an existing regular file (not a directory). + + Returns: + Dict[str, Any]: Operation result containing: + - success (bool): True if file was successfully deleted + - path (str): Absolute path to the deleted file + - message (str): Human-readable description of the operation + - changed (bool): True if file was actually removed + - error (str, optional): Error message if deletion failed + + Examples: + >>> # Delete a specific file + >>> result = delete_file(ctx, "temp_file.txt") + >>> if result['success']: + ... print(f"Deleted: {result['path']}") + + >>> # Handle deletion errors + >>> result = delete_file(ctx, "missing.txt") + >>> if not result['success']: + ... 
print(f"Error: {result.get('error', 'Unknown error')}") + + Best Practices: + - Always verify file exists before attempting deletion + - Check 'success' field to confirm operation completed + - Use list_files first to confirm file paths + - Cannot delete directories (use shell commands for that) + """ + # Generate group_id for delete_file tool execution + group_id = generate_group_id("delete_file", file_path) + result = _delete_file(context, file_path, message_group=group_id) + if "diff" in result: + del result["diff"] + return result diff --git a/code_puppy/tools/file_operations.py b/code_puppy/tools/file_operations.py index 8a312287..571d49d3 100644 --- a/code_puppy/tools/file_operations.py +++ b/code_puppy/tools/file_operations.py @@ -1,138 +1,337 @@ # file_operations.py + import os -import fnmatch -from typing import List, Dict, Any -from code_puppy.tools.common import console +import tempfile +from typing import List + +from pydantic import BaseModel, conint from pydantic_ai import RunContext -from code_puppy.agent import code_generation_agent - - -# Constants for file operations -IGNORE_PATTERNS = [ - "**/node_modules/**", - "**/.git/**", - "**/__pycache__/**", - "**/.DS_Store", - "**/.env", - "**/.venv/**", - "**/venv/**", - "**/.idea/**", - "**/.vscode/**", - "**/dist/**", - "**/build/**", - "**/*.pyc", - "**/*.pyo", - "**/*.pyd", - "**/*.so", - "**/*.dll", - "**/*.exe", -] - - -def should_ignore_path(path: str) -> bool: - """Check if the path should be ignored based on patterns.""" - for pattern in IGNORE_PATTERNS: - if fnmatch.fnmatch(path, pattern): - return True + +# --------------------------------------------------------------------------- +# Module-level helper functions (exposed for unit tests _and_ used as tools) +# --------------------------------------------------------------------------- +from code_puppy.messaging import ( + emit_divider, + emit_error, + emit_info, + emit_success, + emit_system_message, + emit_warning, +) +from code_puppy.tools.common import generate_group_id + + +# Pydantic models for tool return types +class ListedFile(BaseModel): + path: str | None + type: str | None + size: int = 0 + full_path: str | None + depth: int | None + + +class ListFileOutput(BaseModel): + content: str + error: str | None = None + + +class ReadFileOutput(BaseModel): + content: str | None + num_tokens: conint(lt=10000) + error: str | None = None + + +class MatchInfo(BaseModel): + file_path: str | None + line_number: int | None + line_content: str | None + + +class GrepOutput(BaseModel): + matches: List[MatchInfo] + + +def is_likely_home_directory(directory): + """Detect if directory is likely a user's home directory or common home subdirectory""" + abs_dir = os.path.abspath(directory) + home_dir = os.path.expanduser("~") + + # Exact home directory match + if abs_dir == home_dir: + return True + + # Check for common home directory subdirectories + common_home_subdirs = { + "Documents", + "Desktop", + "Downloads", + "Pictures", + "Music", + "Videos", + "Movies", + "Public", + "Library", + "Applications", # Cover macOS/Linux + } + if ( + os.path.basename(abs_dir) in common_home_subdirs + and os.path.dirname(abs_dir) == home_dir + ): + return True + return False -@code_generation_agent.tool -def list_files( - context: RunContext, directory: str = ".", recursive: bool = True -) -> List[Dict[str, Any]]: - """Recursively list all files in a directory, ignoring common patterns. 
+def is_project_directory(directory): + """Quick heuristic to detect if this looks like a project directory""" + project_indicators = { + "package.json", + "pyproject.toml", + "Cargo.toml", + "pom.xml", + "build.gradle", + "CMakeLists.txt", + ".git", + "requirements.txt", + "composer.json", + "Gemfile", + "go.mod", + "Makefile", + "setup.py", + } + + try: + contents = os.listdir(directory) + return any(indicator in contents for indicator in project_indicators) + except (OSError, PermissionError): + return False - Args: - directory: The directory to list files from. Defaults to current directory. - recursive: Whether to search recursively. Defaults to True. - Returns: - A list of dictionaries with file information including path, size, and type. - """ +def _list_files( + context: RunContext, directory: str = ".", recursive: bool = True +) -> ListFileOutput: + import shutil + import subprocess + import sys + results = [] directory = os.path.abspath(directory) - # Display directory listing header - console.print("\n[bold white on blue] DIRECTORY LISTING [/bold white on blue]") - console.print( - f"📂 [bold cyan]{directory}[/bold cyan] [dim](recursive={recursive})[/dim]" + # Build string representation + output_lines = [] + + directory_listing_header = ( + "\n[bold white on blue] DIRECTORY LISTING [/bold white on blue]" ) - console.print("[dim]" + "-" * 60 + "[/dim]") + output_lines.append(directory_listing_header) + + directory_info = f"\U0001f4c2 [bold cyan]{directory}[/bold cyan] [dim](recursive={recursive})[/dim]\n" + output_lines.append(directory_info) + + divider = "[dim]" + "─" * 100 + "\n" + "[/dim]" + output_lines.append(divider) if not os.path.exists(directory): - console.print( - f"[bold red]Error:[/bold red] Directory '{directory}' does not exist" + error_msg = ( + f"[red bold]Error:[/red bold] Directory '{directory}' does not exist" ) - console.print("[dim]" + "-" * 60 + "[/dim]\n") - return [{"error": f"Directory '{directory}' does not exist"}] + output_lines.append(error_msg) + output_lines.append(divider) + return ListFileOutput(content="\n".join(output_lines)) if not os.path.isdir(directory): - console.print(f"[bold red]Error:[/bold red] '{directory}' is not a directory") - console.print("[dim]" + "-" * 60 + "[/dim]\n") - return [{"error": f"'{directory}' is not a directory"}] - - # Track folders and files at each level for tree display - folder_structure = {} - file_list = [] - - for root, dirs, files in os.walk(directory): - # Skip ignored directories - dirs[:] = [d for d in dirs if not should_ignore_path(os.path.join(root, d))] - - rel_path = os.path.relpath(root, directory) - depth = 0 if rel_path == "." 
else rel_path.count(os.sep) + 1 - - if rel_path == ".": - rel_path = "" - - # Add directory entry to results - if rel_path: - dir_path = os.path.join(directory, rel_path) - results.append( - { - "path": rel_path, - "type": "directory", - "size": 0, - "full_path": dir_path, - "depth": depth, - } - ) + error_msg = f"[red bold]Error:[/red bold] '{directory}' is not a directory" + output_lines.append(error_msg) + + output_lines.append(divider) + return ListFileOutput(content="\n".join(output_lines)) + + # Smart home directory detection - auto-limit recursion for performance + # But allow recursion in tests (when context=None) or when explicitly requested + if context is not None and is_likely_home_directory(directory) and recursive: + if not is_project_directory(directory): + warning_msg = "[yellow bold]Warning:[/yellow bold] 🏠 Detected home directory - limiting to non-recursive listing for performance" + output_lines.append(warning_msg) + + info_msg = f"[dim]💡 To force recursive listing in home directory, use list_files('{directory}', recursive=True) explicitly[/dim]" + output_lines.append(info_msg) + recursive = False + + # Create a temporary ignore file with our ignore patterns + ignore_file = None + try: + # Find ripgrep executable - first check system PATH, then virtual environment + rg_path = shutil.which("rg") + if not rg_path: + # Try to find it in the virtual environment + # Use sys.executable to determine the Python environment path + python_dir = os.path.dirname(sys.executable) + # Check both 'bin' (Unix) and 'Scripts' (Windows) directories + for rg_dir in ["bin", "Scripts"]: + venv_rg_path = os.path.join(python_dir, "rg") + if os.path.exists(venv_rg_path): + rg_path = venv_rg_path + break + # Also check with .exe extension for Windows + venv_rg_exe_path = os.path.join(python_dir, "rg.exe") + if os.path.exists(venv_rg_exe_path): + rg_path = venv_rg_exe_path + break + + if not rg_path: + error_msg = "[red bold]Error:[/red bold] ripgrep (rg) not found. Please install ripgrep to use this tool." 
+ output_lines.append(error_msg) + return ListFileOutput(content="\n".join(output_lines)) + + # Build command for ripgrep --files + cmd = [rg_path, "--files"] + + # For non-recursive mode, we'll limit depth after getting results + if not recursive: + cmd.extend(["--max-depth", "1"]) - # Add to folder structure for display - folder_structure[rel_path] = { - "path": rel_path, - "depth": depth, - "full_path": dir_path, - } - - # Add file entries - for file in files: - file_path = os.path.join(root, file) - if should_ignore_path(file_path): + # Add ignore patterns to the command via a temporary file + from code_puppy.tools.common import IGNORE_PATTERNS + + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".ignore") as f: + ignore_file = f.name + for pattern in IGNORE_PATTERNS: + f.write(f"{pattern}\n") + + cmd.extend(["--ignore-file", ignore_file]) + cmd.append(directory) + + # Run ripgrep to get file listing + result = subprocess.run(cmd, capture_output=True, text=True, timeout=30) + + # Process the output lines + files = result.stdout.strip().split("\n") if result.stdout.strip() else [] + + # Create ListedFile objects with metadata + for full_path in files: + if not full_path: # Skip empty lines + continue + + # Skip if file doesn't exist (though it should) + if not os.path.exists(full_path): + continue + + # Extract relative path from the full path + if full_path.startswith(directory): + file_path = full_path[len(directory) :].lstrip(os.sep) + else: + file_path = full_path + + # For non-recursive mode, skip files in subdirectories + # Only check the relative path, not the full path + if not recursive and os.sep in file_path: continue - rel_file_path = os.path.join(rel_path, file) if rel_path else file + # Check if path is a file or directory + if os.path.isfile(full_path): + entry_type = "file" + size = os.path.getsize(full_path) + elif os.path.isdir(full_path): + entry_type = "directory" + size = 0 + else: + # Skip if it's neither a file nor directory + continue try: - size = os.path.getsize(file_path) - file_info = { - "path": rel_file_path, - "type": "file", - "size": size, - "full_path": file_path, - "depth": depth, - } - results.append(file_info) - file_list.append(file_info) - except (FileNotFoundError, PermissionError): + # Get stats for the entry + stat_info = os.stat(full_path) + actual_size = stat_info.st_size + + # For files, we use the actual size; for directories, we keep size=0 + if entry_type == "file": + size = actual_size + + # Calculate depth based on the relative path + depth = file_path.count(os.sep) + + # Add directory entries if needed for files + if entry_type == "file": + dir_path = os.path.dirname(file_path) + if dir_path: + # Add directory path components if they don't exist + path_parts = dir_path.split(os.sep) + for i in range(len(path_parts)): + partial_path = os.sep.join(path_parts[: i + 1]) + # Check if we already added this directory + if not any( + f.path == partial_path and f.type == "directory" + for f in results + ): + results.append( + ListedFile( + path=partial_path, + type="directory", + size=0, + full_path=os.path.join(directory, partial_path), + depth=partial_path.count(os.sep), + ) + ) + + # Add the entry (file or directory) + results.append( + ListedFile( + path=file_path, + type=entry_type, + size=size, + full_path=full_path, + depth=depth, + ) + ) + except (FileNotFoundError, PermissionError, OSError): # Skip files we can't access continue + # In non-recursive mode, we also need to explicitly list directories in the target 
directory + # ripgrep's --files option only returns files, not directories if not recursive: - break + try: + entries = os.listdir(directory) + for entry in entries: + full_entry_path = os.path.join(directory, entry) + # Skip if it doesn't exist or if it's a file (since files are already listed by ripgrep) + if not os.path.exists(full_entry_path) or os.path.isfile( + full_entry_path + ): + continue + + # For non-recursive mode, only include directories that are directly in the target directory + if os.path.isdir(full_entry_path): + # Create a ListedFile for the directory + results.append( + ListedFile( + path=entry, + type="directory", + size=0, + full_path=full_entry_path, + depth=0, + ) + ) + except (FileNotFoundError, PermissionError, OSError): + # Skip directories we can't access + pass + except subprocess.TimeoutExpired: + error_msg = ( + "[red bold]Error:[/red bold] List files command timed out after 30 seconds" + ) + output_lines.append(error_msg) + return ListFileOutput(content="\n".join(output_lines)) + except Exception as e: + error_msg = ( + f"[red bold]Error:[/red bold] Error during list files operation: {e}" + ) + output_lines.append(error_msg) + return ListFileOutput(content="\n".join(output_lines)) + finally: + # Clean up the temporary ignore file + if ignore_file and os.path.exists(ignore_file): + os.unlink(ignore_file) - # Helper function to format file size def format_size(size_bytes): if size_bytes < 1024: return f"{size_bytes} B" @@ -143,185 +342,478 @@ def format_size(size_bytes): else: return f"{size_bytes / (1024 * 1024 * 1024):.1f} GB" - # Helper function to get file icon based on extension def get_file_icon(file_path): ext = os.path.splitext(file_path)[1].lower() if ext in [".py", ".pyw"]: - return "🐍" # Python + return "\U0001f40d" elif ext in [".js", ".jsx", ".ts", ".tsx"]: - return "📜" # JavaScript/TypeScript + return "\U0001f4dc" elif ext in [".html", ".htm", ".xml"]: - return "🌐" # HTML/XML + return "\U0001f310" elif ext in [".css", ".scss", ".sass"]: - return "🎨" # CSS + return "\U0001f3a8" elif ext in [".md", ".markdown", ".rst"]: - return "📝" # Markdown/docs + return "\U0001f4dd" elif ext in [".json", ".yaml", ".yml", ".toml"]: - return "⚙️" # Config files + return "\u2699\ufe0f" elif ext in [".jpg", ".jpeg", ".png", ".gif", ".svg", ".webp"]: - return "🖼️" # Images + return "\U0001f5bc\ufe0f" elif ext in [".mp3", ".wav", ".ogg", ".flac"]: - return "🎵" # Audio + return "\U0001f3b5" elif ext in [".mp4", ".avi", ".mov", ".webm"]: - return "🎬" # Video + return "\U0001f3ac" elif ext in [".pdf", ".doc", ".docx", ".xls", ".xlsx", ".ppt", ".pptx"]: - return "📄" # Documents + return "\U0001f4c4" elif ext in [".zip", ".tar", ".gz", ".rar", ".7z"]: - return "📦" # Archives + return "\U0001f4e6" elif ext in [".exe", ".dll", ".so", ".dylib"]: - return "⚡" # Executables + return "\u26a1" else: - return "📄" # Default file icon - - # Display tree structure - if results: - # Sort directories and files - - files = sorted( - [f for f in results if f["type"] == "file"], key=lambda x: x["path"] - ) + return "\U0001f4c4" - # First show directory itself - console.print( - f"📁 [bold blue]{os.path.basename(directory) or directory}[/bold blue]" - ) + # Count items in results + dir_count = sum(1 for item in results if item.type == "directory") + file_count = sum(1 for item in results if item.type == "file") + total_size = sum(item.size for item in results if item.type == "file") - # After gathering all results - # Combine both directories and files, then sort - all_items = 
sorted(results, key=lambda x: x["path"]) + # Build the directory header section + dir_name = os.path.basename(directory) or directory + dir_header = f"\U0001f4c1 [bold blue]{dir_name}[/bold blue]" + output_lines.append(dir_header) - parent_dirs_with_content = set() + # Sort all items by path for consistent display + all_items = sorted(results, key=lambda x: x.path) - for i, item in enumerate(all_items): - # Skip root directory - if item["type"] == "directory" and not item["path"]: + # Build file and directory tree representation + parent_dirs_with_content = set() + for item in all_items: + # Skip root directory entries with no path + if item.type == "directory" and not item.path: continue - # Get parent directories to track which ones have content - if os.sep in item["path"]: - parent_path = os.path.dirname(item["path"]) + # Track parent directories that contain files/dirs + if os.sep in item.path: + parent_path = os.path.dirname(item.path) parent_dirs_with_content.add(parent_path) - # Calculate depth from path - depth = item["path"].count(os.sep) + 1 if item["path"] else 0 - - # Calculate prefix for tree structure + # Calculate indentation depth based on path separators + depth = item.path.count(os.sep) + 1 if item.path else 0 prefix = "" for d in range(depth): if d == depth - 1: - prefix += "└── " + prefix += "\u2514\u2500\u2500 " else: prefix += " " - # Display item with appropriate icon and color - name = os.path.basename(item["path"]) or item["path"] + # Get the display name (basename) of the item + name = os.path.basename(item.path) or item.path - if item["type"] == "directory": - console.print(f"{prefix}📁 [bold blue]{name}/[/bold blue]") - else: # file - icon = get_file_icon(item["path"]) - size_str = format_size(item["size"]) - console.print( - f"{prefix}{icon} [green]{name}[/green] [dim]({size_str})[/dim]" - ) - else: - console.print("[yellow]Directory is empty[/yellow]") + # Add directory or file line with appropriate formatting + if item.type == "directory": + dir_line = f"{prefix}\U0001f4c1 [bold blue]{name}/[/bold blue]" + output_lines.append(dir_line) + else: + icon = get_file_icon(item.path) + size_str = format_size(item.size) + file_line = f"{prefix}{icon} [green]{name}[/green] [dim]({size_str})[/dim]" + output_lines.append(file_line) - # Display summary - dir_count = sum(1 for item in results if item["type"] == "directory") - file_count = sum(1 for item in results if item["type"] == "file") - total_size = sum(item["size"] for item in results if item["type"] == "file") + # Add summary information + summary_header = "\n[bold cyan]Summary:[/bold cyan]" + output_lines.append(summary_header) - console.print("\n[bold cyan]Summary:[/bold cyan]") - console.print( - f"📁 [blue]{dir_count} directories[/blue], 📄 [green]{file_count} files[/green] [dim]({format_size(total_size)} total)[/dim]" - ) - console.print("[dim]" + "-" * 60 + "[/dim]\n") - - return results - - -@code_generation_agent.tool -def create_file( - context: RunContext, file_path: str, content: str = "" -) -> Dict[str, Any]: - console.log(f"✨ Creating new file [bold green]{file_path}[/bold green]") - """Create a new file with optional content. 
- - Args: - file_path: Path where the file should be created - content: Optional content to write to the file - - Returns: - A dictionary with the result of the operation - """ + summary_line = f"\U0001f4c1 [blue]{dir_count} directories[/blue], \U0001f4c4 [green]{file_count} files[/green] [dim]({format_size(total_size)} total)[/dim]" + output_lines.append(summary_line) + + final_divider = "[dim]" + "─" * 100 + "\n" + "[/dim]" + output_lines.append(final_divider) + + # Return the content string + return ListFileOutput(content="\n".join(output_lines)) + + +def _read_file( + context: RunContext, + file_path: str, + start_line: int | None = None, + num_lines: int | None = None, +) -> ReadFileOutput: file_path = os.path.abspath(file_path) - # Check if file already exists - if os.path.exists(file_path): - return { - "error": f"File '{file_path}' already exists. Use modify_file to edit it." - } - - # Create parent directories if they don't exist - directory = os.path.dirname(file_path) - if directory and not os.path.exists(directory): - try: - os.makedirs(directory) - except Exception as e: - return {"error": f"Error creating directory '{directory}': {str(e)}"} - - # Create the file + # Generate group_id for this tool execution + group_id = generate_group_id("read_file", file_path) + + # Build console message with optional parameters + console_msg = f"\n[bold white on blue] READ FILE [/bold white on blue] \U0001f4c2 [bold cyan]{file_path}[/bold cyan]" + if start_line is not None and num_lines is not None: + console_msg += f" [dim](lines {start_line}-{start_line + num_lines - 1})[/dim]" + emit_info(console_msg, message_group=group_id) + + emit_divider(message_group=group_id) + if not os.path.exists(file_path): + error_msg = f"File {file_path} does not exist" + return ReadFileOutput(content=error_msg, num_tokens=0, error=error_msg) + if not os.path.isfile(file_path): + error_msg = f"{file_path} is not a file" + return ReadFileOutput(content=error_msg, num_tokens=0, error=error_msg) try: - with open(file_path, "w", encoding="utf-8") as f: - console.print("[yellow]Writing to file:[/yellow]") - console.print(content) - f.write(content) - - return { - "success": True, - "path": file_path, - "message": f"File created at '{file_path}'", - "content_length": len(content), - } + with open(file_path, "r", encoding="utf-8") as f: + if start_line is not None and num_lines is not None: + # Read only the specified lines + lines = f.readlines() + # Adjust for 1-based line numbering + start_idx = start_line - 1 + end_idx = start_idx + num_lines + # Ensure indices are within bounds + start_idx = max(0, start_idx) + end_idx = min(len(lines), end_idx) + content = "".join(lines[start_idx:end_idx]) + else: + # Read the entire file + content = f.read() + + # Simple approximation: ~4 characters per token + num_tokens = len(content) // 4 + if num_tokens > 10000: + return ReadFileOutput( + content=None, + error="The file is massive, greater than 10,000 tokens which is dangerous to read entirely. 
Please read this file in chunks.", + num_tokens=0, + ) + return ReadFileOutput(content=content, num_tokens=num_tokens) + except (FileNotFoundError, PermissionError): + # For backward compatibility with tests, return "FILE NOT FOUND" for these specific errors + error_msg = "FILE NOT FOUND" + return ReadFileOutput(content=error_msg, num_tokens=0, error=error_msg) except Exception as e: - return {"error": f"Error creating file '{file_path}': {str(e)}"} - - -@code_generation_agent.tool -def read_file(context: RunContext, file_path: str) -> Dict[str, Any]: - console.log(f"📄 Reading [bold cyan]{file_path}[/bold cyan]") - """Read the contents of a file. - - Args: - file_path: Path to the file to read - - Returns: - A dictionary with the file contents and metadata. - """ - file_path = os.path.abspath(file_path) + message = f"An error occurred trying to read the file: {e}" + return ReadFileOutput(content=message, num_tokens=0, error=message) - if not os.path.exists(file_path): - return {"error": f"File '{file_path}' does not exist"} - if not os.path.isfile(file_path): - return {"error": f"'{file_path}' is not a file"} +def _grep(context: RunContext, search_string: str, directory: str = ".") -> GrepOutput: + import json + import os + import shutil + import subprocess + import sys + + directory = os.path.abspath(directory) + matches: List[MatchInfo] = [] + # Generate group_id for this tool execution + group_id = generate_group_id("grep", f"{directory}_{search_string}") + + emit_info( + f"\n[bold white on blue] GREP [/bold white on blue] \U0001f4c2 [bold cyan]{directory}[/bold cyan] [dim]for '{search_string}'[/dim]", + message_group=group_id, + ) + emit_divider(message_group=group_id) + + # Create a temporary ignore file with our ignore patterns + ignore_file = None try: - with open(file_path, "r", encoding="utf-8") as f: - content = f.read() - - # Get file extension - _, ext = os.path.splitext(file_path) - - return { - "content": content, - "path": file_path, - "extension": ext.lstrip("."), - "total_lines": len(content.splitlines()), - } - except UnicodeDecodeError: - # For binary files, return an error - return {"error": f"Cannot read '{file_path}' as text - it may be a binary file"} + # Use ripgrep to search for the string + # Use absolute path to ensure it works from any directory + # --json for structured output + # --max-count 50 to limit results + # --max-filesize 5M to avoid huge files (increased from 1M) + # --type=all to search across all recognized text file types + # --ignore-file to obey our ignore list + + # Find ripgrep executable - first check system PATH, then virtual environment + rg_path = shutil.which("rg") + if not rg_path: + # Try to find it in the virtual environment + # Use sys.executable to determine the Python environment path + python_dir = os.path.dirname(sys.executable) + # Check both 'bin' (Unix) and 'Scripts' (Windows) directories + for rg_dir in ["bin", "Scripts"]: + venv_rg_path = os.path.join(python_dir, "rg") + if os.path.exists(venv_rg_path): + rg_path = venv_rg_path + break + # Also check with .exe extension for Windows + venv_rg_exe_path = os.path.join(python_dir, "rg.exe") + if os.path.exists(venv_rg_exe_path): + rg_path = venv_rg_exe_path + break + + if not rg_path: + emit_error( + "ripgrep (rg) not found. 
Please install ripgrep to use this tool.", + message_group=group_id, + ) + return GrepOutput(matches=[]) + + cmd = [ + rg_path, + "--json", + "--max-count", + "50", + "--max-filesize", + "5M", + "--type=all", + ] + + # Add ignore patterns to the command via a temporary file + from code_puppy.tools.common import IGNORE_PATTERNS + + with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".ignore") as f: + ignore_file = f.name + for pattern in IGNORE_PATTERNS: + f.write(f"{pattern}\n") + + cmd.extend(["--ignore-file", ignore_file]) + cmd.extend([search_string, directory]) + result = subprocess.run(cmd, capture_output=True, text=True, timeout=30) + + # Parse the JSON output from ripgrep + for line in result.stdout.strip().split("\n"): + if not line: + continue + try: + match_data = json.loads(line) + # Only process match events, not context or summary + if match_data.get("type") == "match": + data = match_data.get("data", {}) + path_data = data.get("path", {}) + file_path = ( + path_data.get("text", "") if path_data.get("text") else "" + ) + line_number = data.get("line_number", None) + line_content = ( + data.get("lines", {}).get("text", "") + if data.get("lines", {}).get("text") + else "" + ) + if len(line_content.strip()) > 512: + line_content = line_content.strip()[0:512] + if file_path and line_number: + match_info = MatchInfo( + file_path=file_path, + line_number=line_number, + line_content=line_content.strip(), + ) + matches.append(match_info) + # Limit to 50 matches total, same as original implementation + if len(matches) >= 50: + break + emit_system_message( + f"[green]Match:[/green] {file_path}:{line_number} - {line_content.strip()}", + message_group=group_id, + ) + except json.JSONDecodeError: + # Skip lines that aren't valid JSON + continue + + if not matches: + emit_warning( + f"No matches found for '{search_string}' in {directory}", + message_group=group_id, + ) + else: + emit_success( + f"Found {len(matches)} match(es) for '{search_string}' in {directory}", + message_group=group_id, + ) + + except subprocess.TimeoutExpired: + emit_error("Grep command timed out after 30 seconds", message_group=group_id) + except FileNotFoundError: + emit_error( + "ripgrep (rg) not found. Please install ripgrep to use this tool.", + message_group=group_id, + ) except Exception as e: - return {"error": f"Error reading file '{file_path}': {str(e)}"} + emit_error(f"Error during grep operation: {e}", message_group=group_id) + finally: + # Clean up the temporary ignore file + if ignore_file and os.path.exists(ignore_file): + os.unlink(ignore_file) + + return GrepOutput(matches=matches) + + +def register_list_files(agent): + """Register only the list_files tool.""" + from code_puppy.config import get_allow_recursion + + @agent.tool + def list_files( + context: RunContext, directory: str = ".", recursive: bool = True + ) -> ListFileOutput: + """List files and directories with intelligent filtering and safety features. + + This function will only allow recursive listing when the allow_recursion + configuration is set to true via the /set allow_recursion=true command. + + This tool provides comprehensive directory listing with smart home directory + detection, project-aware recursion, and token-safe output. It automatically + ignores common build artifacts, cache directories, and other noise while + providing rich file metadata and visual formatting. + + Args: + context (RunContext): The PydanticAI runtime context for the agent. + directory (str, optional): Path to the directory to list. 
Can be relative + or absolute. Defaults to "." (current directory). + recursive (bool, optional): Whether to recursively list subdirectories. + Automatically disabled for home directories unless they contain + project indicators. Also requires allow_recursion=true in config. + Defaults to True. + + Returns: + ListFileOutput: A response containing: + - content (str): String representation of the directory listing + - error (str | None): Error message if listing failed + + Examples: + >>> # List current directory + >>> result = list_files(ctx) + >>> print(result.content) + + >>> # List specific directory non-recursively + >>> result = list_files(ctx, "/path/to/project", recursive=False) + >>> print(result.content) + + >>> # Handle potential errors + >>> result = list_files(ctx, "/nonexistent/path") + >>> if result.error: + ... print(f"Error: {result.error}") + + Best Practices: + - Always use this before reading/modifying files + - Use non-recursive for quick directory overviews + - Check for errors in the response + - Combine with grep to find specific file patterns + """ + warning = None + if recursive and not get_allow_recursion(): + warning = "Recursion disabled globally for list_files - returning non-recursive results" + recursive = False + result = _list_files(context, directory, recursive) + + # Emit the content directly to ensure it's displayed to the user + emit_info( + result.content, message_group=generate_group_id("list_files", directory) + ) + if warning: + result.error = warning + if (len(result.content)) > 200000: + result.content = result.content[0:200000] + result.error = "Results truncated. This is a massive directory tree, recommend non-recursive calls to list_files" + return result + + +def register_read_file(agent): + """Register only the read_file tool.""" + + @agent.tool + def read_file( + context: RunContext, + file_path: str = "", + start_line: int | None = None, + num_lines: int | None = None, + ) -> ReadFileOutput: + """Read file contents with optional line-range selection and token safety. + + This tool provides safe file reading with automatic token counting and + optional line-range selection for handling large files efficiently. + It protects against reading excessively large files that could overwhelm + the agent's context window. + + Args: + context (RunContext): The PydanticAI runtime context for the agent. + file_path (str): Path to the file to read. Can be relative or absolute. + Cannot be empty. + start_line (int | None, optional): Starting line number for partial reads + (1-based indexing). If specified, num_lines must also be provided. + Defaults to None (read entire file). + num_lines (int | None, optional): Number of lines to read starting from + start_line. Must be specified if start_line is provided. + Defaults to None (read to end of file). + + Returns: + ReadFileOutput: A structured response containing: + - content (str | None): The file contents or error message + - num_tokens (int): Estimated token count (constrained to < 10,000) + - error (str | None): Error message if reading failed + + Examples: + >>> # Read entire file + >>> result = read_file(ctx, "example.py") + >>> print(f"Read {result.num_tokens} tokens") + >>> print(result.content) + + >>> # Read specific line range + >>> result = read_file(ctx, "large_file.py", start_line=10, num_lines=20) + >>> print("Lines 10-29:", result.content) + + >>> # Handle errors + >>> result = read_file(ctx, "missing.txt") + >>> if result.error: + ... 
print(f"Error: {result.error}") + + Best Practices: + - Always check for errors before using content + - Use line ranges for large files to avoid token limits + - Monitor num_tokens to stay within context limits + - Combine with list_files to find files first + """ + return _read_file(context, file_path, start_line, num_lines) + + +def register_grep(agent): + """Register only the grep tool.""" + + @agent.tool + def grep( + context: RunContext, search_string: str = "", directory: str = "." + ) -> GrepOutput: + """Recursively search for text patterns across files using ripgrep (rg). + + This tool leverages the high-performance ripgrep utility for fast text + searching across directory trees. It searches across all recognized text file + types (Python, JavaScript, HTML, CSS, Markdown, etc.) while automatically + filtering binary files and limiting results for performance. + + The search_string parameter supports ripgrep's full flag syntax, allowing + advanced searches including regex patterns, case-insensitive matching, + and other ripgrep features. + + Args: + context (RunContext): The PydanticAI runtime context for the agent. + search_string (str): The text pattern to search for. Can include ripgrep + flags like '--ignore-case', '-w' (word boundaries), etc. + Cannot be empty. + directory (str, optional): Root directory to start the recursive search. + Can be relative or absolute. Defaults to "." (current directory). + + Returns: + GrepOutput: A structured response containing: + - matches (List[MatchInfo]): List of matches found, where each + MatchInfo contains: + - file_path (str | None): Absolute path to the file containing the match + - line_number (int | None): Line number where match was found (1-based) + - line_content (str | None): Full line content containing the match + + Examples: + >>> # Simple text search + >>> result = grep(ctx, "def my_function") + >>> for match in result.matches: + ... print(f"{match.file_path}:{match.line_number}: {match.line_content}") + + >>> # Case-insensitive search + >>> result = grep(ctx, "--ignore-case TODO", "/path/to/project/src") + >>> print(f"Found {len(result.matches)} TODO items") + + >>> # Word boundary search (regex) + >>> result = grep(ctx, "-w \\w+State\\b") + >>> files_with_state = {match.file_path for match in result.matches} + + Best Practices: + - Use specific search terms to avoid too many results + - Leverage ripgrep's powerful regex and flag features for advanced searches + - ripgrep is much faster than naive implementations + - Results are capped at 50 matches for performance + """ + return _grep(context, search_string, directory) diff --git a/code_puppy/tools/tools_content.py b/code_puppy/tools/tools_content.py new file mode 100644 index 00000000..e35d2908 --- /dev/null +++ b/code_puppy/tools/tools_content.py @@ -0,0 +1,53 @@ +tools_content = """ +Woof! 🐶 Here's my complete toolkit! I'm like a Swiss Army knife but way more fun: + +# **File Operations** +- **`list_files(directory, recursive)`** - Browse directories like a good sniffing dog! Shows files, directories, sizes, and depth +- **`read_file(file_path)`** - Read any file content (with line count info) +- **`edit_file(path, diff)`** - The ultimate file editor! Can: + - ✅ Create new files + - ✅ Overwrite entire files + - ✅ Make targeted replacements (preferred method!) + - ✅ Delete specific snippets +- **`delete_file(file_path)`** - Remove files when needed (use with caution!) 
+ +# **Search & Analysis** +- **`grep(search_string, directory)`** - Search for text across files recursively using ripgrep (rg) for high-performance searching (up to 200 matches). Searches across all text file types, not just Python files. Supports ripgrep flags in the search string. + +# 💻 **System Operations** +- **`agent_run_shell_command(command, cwd, timeout)`** - Execute shell commands with full output capture (stdout, stderr, exit codes) + +# **Network Operations** +- **`grab_json_from_url(url)`** - Fetch JSON data from URLs (when network allows) + +# **Agent Communication** +- **`agent_share_your_reasoning(reasoning, next_steps)`** - Let you peek into my thought process (transparency is key!) +- **`final_result(output_message, awaiting_user_input)`** - Deliver final responses to you + +# **Tool Usage Philosophy** + +I follow these principles religiously: +- **DRY** - Don't Repeat Yourself +- **YAGNI** - You Ain't Gonna Need It +- **SOLID** - Single responsibility, Open/closed, etc. +- **Files under 600 lines** - Keep things manageable! + +# **Pro Tips** + +- For `edit_file`, I prefer **targeted replacements** over full file overwrites (more efficient!) +- I always use `agent_share_your_reasoning` before major operations to explain my thinking +- When running tests, I use `--silent` flags for JS/TS to avoid spam +- I explore with `list_files` before modifying anything + +# **What I Can Do** + +With these tools, I can: +- 📝 Write, modify, and organize code +- 🔍 Analyze codebases and find patterns +- ⚡ Run tests and debug issues +- 📊 Generate documentation and reports +- 🔄 Automate development workflows +- 🧹 Refactor code following best practices + +Ready to fetch some code sticks and build amazing software together? 🔧✨ +""" diff --git a/code_puppy/tools/web_search.py b/code_puppy/tools/web_search.py deleted file mode 100644 index d97760b9..00000000 --- a/code_puppy/tools/web_search.py +++ /dev/null @@ -1,41 +0,0 @@ -from code_puppy.agent import code_generation_agent -from typing import List, Dict -import requests -from bs4 import BeautifulSoup -from pydantic_ai import RunContext - - -@code_generation_agent.tool -def web_search( - context: RunContext, query: str, num_results: int = 5 -) -> List[Dict[str, str]]: - """Perform a web search and return a list of results with titles and URLs. - - Args: - query: The search query. - num_results: Number of results to return. Defaults to 5. - - Returns: - A list of dictionaries, each containing 'title' and 'url' for a search result. - """ - search_url = "https://www.google.com/search" - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3" - } - params = {"q": query} - - response = requests.get(search_url, headers=headers, params=params) - response.raise_for_status() - - soup = BeautifulSoup(response.text, "html.parser") - results = [] - - for g in soup.find_all("div", class_="tF2Cxc")[:num_results]: - title_element = g.find("h3") - link_element = g.find("a") - if title_element and link_element: - title = title_element.get_text() - url = link_element["href"] - results.append({"title": title, "url": url}) - - return results diff --git a/code_puppy/tui/__init__.py b/code_puppy/tui/__init__.py new file mode 100644 index 00000000..85d8c8c2 --- /dev/null +++ b/code_puppy/tui/__init__.py @@ -0,0 +1,10 @@ +""" +Code Puppy TUI package. + +This package provides a modern Text User Interface for Code Puppy using the Textual framework. 
+It maintains compatibility with existing functionality while providing an enhanced user experience. +""" + +from .app import CodePuppyTUI, run_textual_ui + +__all__ = ["CodePuppyTUI", "run_textual_ui"] diff --git a/code_puppy/tui/app.py b/code_puppy/tui/app.py new file mode 100644 index 00000000..8586eb9b --- /dev/null +++ b/code_puppy/tui/app.py @@ -0,0 +1,1102 @@ +""" +Main TUI application class. +""" + +from datetime import datetime, timezone + +from textual import on +from textual.app import App, ComposeResult +from textual.binding import Binding +from textual.containers import Container +from textual.events import Resize +from textual.reactive import reactive +from textual.widgets import Footer, ListView + +# message_history_accumulator and prune_interrupted_tool_calls have been moved to BaseAgent class +from code_puppy.agents.agent_manager import get_current_agent +from code_puppy.command_line.command_handler import handle_command +from code_puppy.config import ( + get_global_model_name, + get_puppy_name, + initialize_command_history_file, + save_command_to_history, +) + +# Import our message queue system +from code_puppy.messaging import TUIRenderer, get_global_queue +from code_puppy.tui.components import ( + ChatView, + CustomTextArea, + InputArea, + Sidebar, + StatusBar, +) + +# Import shared message classes +from .messages import CommandSelected, HistoryEntrySelected +from .models import ChatMessage, MessageType +from .screens import HelpScreen, MCPInstallWizardScreen, SettingsScreen, ToolsScreen + + +class CodePuppyTUI(App): + """Main Code Puppy TUI application.""" + + TITLE = "Code Puppy - AI Code Assistant" + SUB_TITLE = "TUI Mode" + + CSS = """ + Screen { + layout: horizontal; + } + + #main-area { + layout: vertical; + width: 1fr; + min-width: 40; + } + + #chat-container { + height: 1fr; + min-height: 10; + } + """ + + BINDINGS = [ + Binding("ctrl+q", "quit", "Quit"), + Binding("ctrl+c", "quit", "Quit"), + Binding("ctrl+l", "clear_chat", "Clear Chat"), + Binding("ctrl+1", "show_help", "Help"), + Binding("ctrl+2", "toggle_sidebar", "History"), + Binding("ctrl+3", "open_settings", "Settings"), + Binding("ctrl+4", "show_tools", "Tools"), + Binding("ctrl+5", "focus_input", "Focus Prompt"), + Binding("ctrl+6", "focus_chat", "Focus Response"), + Binding("ctrl+t", "open_mcp_wizard", "MCP Install Wizard"), + ] + + # Reactive variables for app state + current_model = reactive("") + puppy_name = reactive("") + current_agent = reactive("") + agent_busy = reactive(False) + + def watch_agent_busy(self) -> None: + """Watch for changes to agent_busy state.""" + # Update the submit/cancel button state when agent_busy changes + self._update_submit_cancel_button(self.agent_busy) + + def watch_current_agent(self) -> None: + """Watch for changes to current_agent and update title.""" + self._update_title() + + def _update_title(self) -> None: + """Update the application title to include current agent.""" + if self.current_agent: + self.title = f"Code Puppy - {self.current_agent}" + self.sub_title = "TUI Mode" + else: + self.title = "Code Puppy - AI Code Assistant" + self.sub_title = "TUI Mode" + + def _on_agent_reload(self, agent_id: str, agent_name: str) -> None: + """Callback for when agent is reloaded/changed.""" + # Get the updated agent configuration + from code_puppy.agents.agent_manager import get_current_agent + + current_agent_config = get_current_agent() + new_agent_display = ( + current_agent_config.display_name if current_agent_config else "code-puppy" + ) + + # Update the reactive 
variable (this will trigger watch_current_agent) + self.current_agent = new_agent_display + + # Add a system message to notify the user + self.add_system_message(f"🔄 Switched to agent: {new_agent_display}") + + def __init__(self, initial_command: str = None, **kwargs): + super().__init__(**kwargs) + self._current_worker = None + self.initial_command = initial_command + + # Initialize message queue renderer + self.message_queue = get_global_queue() + self.message_renderer = TUIRenderer(self.message_queue, self) + self._renderer_started = False + + def compose(self) -> ComposeResult: + """Create the UI layout.""" + yield StatusBar() + yield Sidebar() + with Container(id="main-area"): + with Container(id="chat-container"): + yield ChatView(id="chat-view") + yield InputArea() + yield Footer() + + def on_mount(self) -> None: + """Initialize the application when mounted.""" + # Register this app instance for global access + from code_puppy.tui_state import set_tui_app_instance + + set_tui_app_instance(self) + + # Register callback for agent reload events + from code_puppy.callbacks import register_callback + + register_callback("agent_reload", self._on_agent_reload) + + # Load configuration + self.current_model = get_global_model_name() + self.puppy_name = get_puppy_name() + + # Get current agent information + from code_puppy.agents.agent_manager import get_current_agent + + current_agent_config = get_current_agent() + self.current_agent = ( + current_agent_config.display_name if current_agent_config else "code-puppy" + ) + + # Initial title update + self._update_title() + + # Use runtime manager to ensure we always have the current agent + # Update status bar + status_bar = self.query_one(StatusBar) + status_bar.current_model = self.current_model + status_bar.puppy_name = self.puppy_name + status_bar.agent_status = "Ready" + + # Add welcome message with YOLO mode notification + self.add_system_message( + "Welcome to Code Puppy 🐶!\n💨 YOLO mode is enabled in TUI: commands will execute without confirmation." 
+ ) + + # Start the message renderer EARLY to catch startup messages + # Using call_after_refresh to start it as soon as possible after mount + self.call_after_refresh(self.start_message_renderer_sync) + + # Kick off a non-blocking preload of the agent/model so the + # status bar shows loading before first prompt + self.call_after_refresh(self.preload_agent_on_startup) + + # After preload, offer to restore an autosave session (like interactive mode) + self.call_after_refresh(self.maybe_prompt_restore_autosave) + + # Apply responsive design adjustments + self.apply_responsive_layout() + + # Auto-focus the input field so user can start typing immediately + self.call_after_refresh(self.focus_input_field) + + # Process initial command if provided + if self.initial_command: + self.call_after_refresh(self.process_initial_command) + + def _tighten_text(self, text: str) -> str: + """Aggressively tighten whitespace: trim lines, collapse multiples, drop extra blanks.""" + try: + import re + + # Split into lines, strip each, drop empty runs + lines = [re.sub(r"\s+", " ", ln.strip()) for ln in text.splitlines()] + # Remove consecutive blank lines + tight_lines = [] + last_blank = False + for ln in lines: + is_blank = (ln == "") + if is_blank and last_blank: + continue + tight_lines.append(ln) + last_blank = is_blank + return "\n".join(tight_lines).strip() + except Exception: + return text.strip() + + def add_system_message( + self, content: str, message_group: str = None, group_id: str = None + ) -> None: + """Add a system message to the chat.""" + # Support both parameter names for backward compatibility + final_group_id = message_group or group_id + # Tighten only plain strings + content_to_use = ( + self._tighten_text(content) if isinstance(content, str) else content + ) + message = ChatMessage( + id=f"sys_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.SYSTEM, + content=content_to_use, + timestamp=datetime.now(timezone.utc), + group_id=final_group_id, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_system_message_rich( + self, rich_content, message_group: str = None, group_id: str = None + ) -> None: + """Add a system message with Rich content (like Markdown) to the chat.""" + # Support both parameter names for backward compatibility + final_group_id = message_group or group_id + message = ChatMessage( + id=f"sys_rich_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.SYSTEM, + content=rich_content, # Store the Rich object directly + timestamp=datetime.now(timezone.utc), + group_id=final_group_id, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_user_message(self, content: str, message_group: str = None) -> None: + """Add a user message to the chat.""" + message = ChatMessage( + id=f"user_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.USER, + content=content, + timestamp=datetime.now(timezone.utc), + group_id=message_group, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_agent_message(self, content: str, message_group: str = None) -> None: + """Add an agent message to the chat.""" + message = ChatMessage( + id=f"agent_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.AGENT_RESPONSE, + content=content, + timestamp=datetime.now(timezone.utc), + group_id=message_group, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_error_message(self, 
content: str, message_group: str = None) -> None: + """Add an error message to the chat.""" + content_to_use = ( + self._tighten_text(content) if isinstance(content, str) else content + ) + message = ChatMessage( + id=f"error_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.ERROR, + content=content_to_use, + timestamp=datetime.now(timezone.utc), + group_id=message_group, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_agent_reasoning_message( + self, content: str, message_group: str = None + ) -> None: + """Add an agent reasoning message to the chat.""" + message = ChatMessage( + id=f"agent_reasoning_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.AGENT_REASONING, + content=content, + timestamp=datetime.now(timezone.utc), + group_id=message_group, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def add_planned_next_steps_message( + self, content: str, message_group: str = None + ) -> None: + """Add an planned next steps to the chat.""" + message = ChatMessage( + id=f"planned_next_steps_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.PLANNED_NEXT_STEPS, + content=content, + timestamp=datetime.now(timezone.utc), + group_id=message_group, + ) + chat_view = self.query_one("#chat-view", ChatView) + chat_view.add_message(message) + + def on_custom_text_area_message_sent( + self, event: CustomTextArea.MessageSent + ) -> None: + """Handle message sent from custom text area.""" + self.action_send_message() + + def on_input_area_submit_requested(self, event) -> None: + """Handle submit button clicked.""" + self.action_send_message() + + def on_input_area_cancel_requested(self, event) -> None: + """Handle cancel button clicked.""" + self.action_cancel_processing() + + async def on_key(self, event) -> None: + """Handle app-level key events.""" + input_field = self.query_one("#input-field", CustomTextArea) + + # Only handle keys when input field is focused + if input_field.has_focus: + # Handle Ctrl+Enter or Shift+Enter for a new line + if event.key in ("ctrl+enter", "shift+enter"): + input_field.insert("\n") + event.prevent_default() + return + + # Check if a modal is currently active - if so, let the modal handle keys + if hasattr(self, "_active_screen") and self._active_screen: + # Don't handle keys at the app level when a modal is active + return + + # Handle arrow keys for sidebar navigation when sidebar is visible + if not input_field.has_focus: + try: + sidebar = self.query_one(Sidebar) + if sidebar.display: + # Handle navigation for the currently active tab + tabs = self.query_one("#sidebar-tabs") + active_tab = tabs.active + + if active_tab == "history-tab": + history_list = self.query_one("#history-list", ListView) + if event.key == "enter": + if history_list.highlighted_child and hasattr( + history_list.highlighted_child, "command_entry" + ): + # Show command history modal + from .components.command_history_modal import ( + CommandHistoryModal, + ) + + # Make sure sidebar's current_history_index is synced with the ListView + sidebar.current_history_index = history_list.index + + # Push the modal screen + # The modal will get the command entries from the sidebar + self.push_screen(CommandHistoryModal()) + event.prevent_default() + return + except Exception: + pass + + def refresh_history_display(self) -> None: + """Refresh the history display with the command history file.""" + try: + sidebar = self.query_one(Sidebar) + sidebar.load_command_history() + 
except Exception: + pass # Silently fail if history list not available + + def action_send_message(self) -> None: + """Send the current message.""" + input_field = self.query_one("#input-field", CustomTextArea) + message = input_field.text.strip() + + if message: + # Clear input + input_field.text = "" + + # Add user message to chat + self.add_user_message(message) + + # Save command to history file with timestamp + try: + save_command_to_history(message) + except Exception as e: + self.add_error_message(f"Failed to save command history: {str(e)}") + + # Update button state + self._update_submit_cancel_button(True) + + # Process the message asynchronously using Textual's worker system + # Using exclusive=False to avoid TaskGroup conflicts with MCP servers + self._current_worker = self.run_worker( + self.process_message(message), exclusive=False + ) + + def _update_submit_cancel_button(self, is_cancel_mode: bool) -> None: + """Update the submit/cancel button state.""" + try: + from .components.input_area import SubmitCancelButton + + button = self.query_one(SubmitCancelButton) + button.is_cancel_mode = is_cancel_mode + except Exception: + pass # Silently fail if button not found + + def action_cancel_processing(self) -> None: + """Cancel the current message processing.""" + if hasattr(self, "_current_worker") and self._current_worker is not None: + try: + # First, kill any running shell processes (same as interactive mode Ctrl+C) + from code_puppy.tools.command_runner import ( + kill_all_running_shell_processes, + ) + + killed = kill_all_running_shell_processes() + if killed: + self.add_system_message( + f"🔥 Cancelled {killed} running shell process(es)" + ) + # Don't stop spinner/agent - let the agent continue processing + # Shell processes killed, but agent worker continues running + + else: + # Only cancel the agent task if NO processes were killed + self._current_worker.cancel() + self.add_system_message("⚠️ Processing cancelled by user") + # Stop spinner and clear state only when agent is actually cancelled + self._current_worker = None + self.agent_busy = False + self.stop_agent_progress() + except Exception as e: + self.add_error_message(f"Failed to cancel processing: {str(e)}") + # Only clear state on exception if we haven't already done so + if ( + hasattr(self, "_current_worker") + and self._current_worker is not None + ): + self._current_worker = None + self.agent_busy = False + self.stop_agent_progress() + + async def process_message(self, message: str) -> None: + """Process a user message asynchronously.""" + try: + self.agent_busy = True + self._update_submit_cancel_button(True) + self.start_agent_progress("Thinking") + + # Handle commands + if message.strip().startswith("/"): + # Handle special commands directly + if message.strip().lower() in ("clear", "/clear"): + self.action_clear_chat() + return + + # Let the command handler process all /agent commands + # result will be handled by the command handler directly through messaging system + if message.strip().startswith("/agent"): + # The command handler will emit messages directly to our messaging system + handle_command(message.strip()) + # Agent manager will automatically use the latest agent + return + + # Handle exit commands + if message.strip().lower() in ("/exit", "/quit"): + self.add_system_message("Goodbye!") + # Exit the application + self.app.exit() + return + + # Use the existing command handler + # The command handler directly uses the messaging system, so we don't need to capture stdout + try: + result = 
handle_command(message.strip()) + if not result: + self.add_system_message(f"Unknown command: {message}") + except Exception as e: + self.add_error_message(f"Error executing command: {str(e)}") + return + + # Process with agent + try: + self.update_agent_progress("Processing", 25) + + # Use agent_manager's run_with_mcp to handle MCP servers properly + try: + agent = get_current_agent() + self.update_agent_progress("Processing", 50) + result = await agent.run_with_mcp( + message, + ) + + if not result or not hasattr(result, "output"): + self.add_error_message("Invalid response format from agent") + return + + self.update_agent_progress("Processing", 75) + agent_response = result.output + self.add_agent_message(agent_response) + + # Auto-save session if enabled (mirror --interactive) + try: + from code_puppy.config import auto_save_session_if_enabled + auto_save_session_if_enabled() + except Exception: + pass + + # Refresh history display to show new interaction + self.refresh_history_display() + + except Exception as eg: + # Handle TaskGroup and other exceptions + # BaseExceptionGroup is only available in Python 3.11+ + if hasattr(eg, "exceptions"): + # Handle TaskGroup exceptions specifically (Python 3.11+) + for e in eg.exceptions: + self.add_error_message(f"MCP/Agent error: {str(e)}") + else: + # Handle regular exceptions + self.add_error_message(f"MCP/Agent error: {str(eg)}") + finally: + pass + except Exception as agent_error: + # Handle any other errors in agent processing + self.add_error_message(f"Agent processing failed: {str(agent_error)}") + + except Exception as e: + self.add_error_message(f"Error processing message: {str(e)}") + finally: + self.agent_busy = False + self._update_submit_cancel_button(False) + self.stop_agent_progress() + + # Action methods + def action_clear_chat(self) -> None: + """Clear the chat history.""" + chat_view = self.query_one("#chat-view", ChatView) + chat_view.clear_messages() + agent = get_current_agent() + agent.clear_message_history() + self.add_system_message("Chat history cleared") + + def action_show_help(self) -> None: + """Show help information in a modal.""" + self.push_screen(HelpScreen()) + + def action_toggle_sidebar(self) -> None: + """Toggle sidebar visibility.""" + sidebar = self.query_one(Sidebar) + sidebar.display = not sidebar.display + + # If sidebar is now visible, focus the history list to enable immediate keyboard navigation + if sidebar.display: + try: + # Ensure history tab is active + tabs = self.query_one("#sidebar-tabs") + tabs.active = "history-tab" + + # Refresh the command history + sidebar.load_command_history() + + # Focus the history list + history_list = self.query_one("#history-list", ListView) + history_list.focus() + + # If the list has items, get the first item for the modal + if len(history_list.children) > 0: + # Reset sidebar's internal index tracker to 0 + sidebar.current_history_index = 0 + + # Set ListView index to match + history_list.index = 0 + + # Get the first item and show the command history modal + first_item = history_list.children[0] + if hasattr(first_item, "command_entry"): + # command_entry = first_item.command_entry + + # Use call_after_refresh to allow UI to update first + def show_modal(): + from .components.command_history_modal import ( + CommandHistoryModal, + ) + + # Get all command entries from the history list + command_entries = [] + for i, child in enumerate(history_list.children): + if hasattr(child, "command_entry"): + command_entries.append(child.command_entry) + + # Push the 
modal screen + # The modal will get the command entries from the sidebar + self.push_screen(CommandHistoryModal()) + + # Schedule modal to appear after UI refresh + self.call_after_refresh(show_modal) + except Exception as e: + # Log the exception in debug mode but silently fail for end users + import logging + + logging.debug(f"Error focusing history item: {str(e)}") + pass + else: + # If sidebar is now hidden, focus the input field for a smooth workflow + try: + self.action_focus_input() + except Exception: + # Silently fail if there's an issue with focusing + pass + + def action_focus_input(self) -> None: + """Focus the input field.""" + input_field = self.query_one("#input-field", CustomTextArea) + input_field.focus() + + def focus_input_field(self) -> None: + """Focus the input field (used for auto-focus on startup).""" + try: + input_field = self.query_one("#input-field", CustomTextArea) + input_field.focus() + except Exception: + pass # Silently handle if widget not ready yet + + def action_focus_chat(self) -> None: + """Focus the chat area.""" + chat_view = self.query_one("#chat-view", ChatView) + chat_view.focus() + + def action_show_tools(self) -> None: + """Show the tools modal.""" + self.push_screen(ToolsScreen()) + + def action_open_settings(self) -> None: + """Open the settings configuration screen.""" + + def handle_settings_result(result): + if result and result.get("success"): + # Update reactive variables + from code_puppy.config import get_global_model_name, get_puppy_name + + self.puppy_name = get_puppy_name() + + # Handle model change if needed + if result.get("model_changed"): + new_model = get_global_model_name() + self.current_model = new_model + try: + current_agent = get_current_agent() + current_agent.reload_code_generation_agent() + except Exception as reload_error: + self.add_error_message( + f"Failed to reload agent after model change: {reload_error}" + ) + + # Update status bar + status_bar = self.query_one(StatusBar) + status_bar.puppy_name = self.puppy_name + status_bar.current_model = self.current_model + + # Show success message + self.add_system_message(result.get("message", "Settings updated")) + elif ( + result + and not result.get("success") + and "cancelled" not in result.get("message", "").lower() + ): + # Show error message (but not for cancellation) + self.add_error_message(result.get("message", "Settings update failed")) + + self.push_screen(SettingsScreen(), handle_settings_result) + + def action_open_mcp_wizard(self) -> None: + """Open the MCP Install Wizard.""" + + def handle_wizard_result(result): + if result and result.get("success"): + # Show success message + self.add_system_message( + result.get("message", "MCP server installed successfully") + ) + + # If a server was installed, suggest starting it + if result.get("server_name"): + server_name = result["server_name"] + self.add_system_message( + f"💡 Use '/mcp start {server_name}' to start the server" + ) + elif ( + result + and not result.get("success") + and "cancelled" not in result.get("message", "").lower() + ): + # Show error message (but not for cancellation) + self.add_error_message(result.get("message", "MCP installation failed")) + + self.push_screen(MCPInstallWizardScreen(), handle_wizard_result) + + def process_initial_command(self) -> None: + """Process the initial command provided when starting the TUI.""" + if self.initial_command: + # Add the initial command to the input field + input_field = self.query_one("#input-field", CustomTextArea) + input_field.text = 
self.initial_command + + # Show that we're auto-executing the initial command + self.add_system_message( + f"🚀 Auto-executing initial command: {self.initial_command}" + ) + + # Automatically submit the message + self.action_send_message() + + def show_history_details(self, history_entry: dict) -> None: + """Show detailed information about a selected history entry.""" + try: + timestamp = history_entry.get("timestamp", "Unknown time") + description = history_entry.get("description", "No description") + output = history_entry.get("output", "") + awaiting_input = history_entry.get("awaiting_user_input", False) + + # Parse timestamp for better display with safe parsing + def parse_timestamp_safely_for_details(timestamp_str: str) -> str: + """Parse timestamp string safely for detailed display.""" + try: + # Handle 'Z' suffix (common UTC format) + cleaned_timestamp = timestamp_str.replace("Z", "+00:00") + parsed_dt = datetime.fromisoformat(cleaned_timestamp) + + # If the datetime is naive (no timezone), assume UTC + if parsed_dt.tzinfo is None: + parsed_dt = parsed_dt.replace(tzinfo=timezone.utc) + + return parsed_dt.strftime("%Y-%m-%d %H:%M:%S") + except (ValueError, AttributeError, TypeError): + # Handle invalid timestamp formats gracefully + return timestamp_str + + formatted_time = parse_timestamp_safely_for_details(timestamp) + + # Create detailed view content + details = [ + f"Timestamp: {formatted_time}", + f"Description: {description}", + "", + ] + + if output: + details.extend( + [ + "Output:", + "─" * 40, + output, + "", + ] + ) + + if awaiting_input: + details.append("⚠️ Was awaiting user input") + + # Display details as a system message in the chat + detail_text = "\\n".join(details) + self.add_system_message(f"History Details:\\n{detail_text}") + + except Exception as e: + self.add_error_message(f"Failed to show history details: {e}") + + # Progress and status methods + def set_agent_status(self, status: str, show_progress: bool = False) -> None: + """Update agent status and optionally show/hide progress bar.""" + try: + # Update status bar + status_bar = self.query_one(StatusBar) + status_bar.agent_status = status + + # Update spinner visibility + from .components.input_area import SimpleSpinnerWidget + + spinner = self.query_one("#spinner", SimpleSpinnerWidget) + if show_progress: + spinner.add_class("visible") + spinner.display = True + spinner.start_spinning() + else: + spinner.remove_class("visible") + spinner.display = False + spinner.stop_spinning() + + except Exception: + pass # Silently fail if widgets not available + + def start_agent_progress(self, initial_status: str = "Thinking") -> None: + """Start showing agent progress indicators.""" + self.set_agent_status(initial_status, show_progress=True) + + def update_agent_progress(self, status: str, progress: int = None) -> None: + """Update agent progress during processing.""" + try: + status_bar = self.query_one(StatusBar) + status_bar.agent_status = status + # Note: LoadingIndicator doesn't use progress values, it just spins + except Exception: + pass + + def stop_agent_progress(self) -> None: + """Stop showing agent progress indicators.""" + self.set_agent_status("Ready", show_progress=False) + + def on_resize(self, event: Resize) -> None: + """Handle terminal resize events to update responsive elements.""" + try: + # Apply responsive layout adjustments + self.apply_responsive_layout() + + # Update status bar to reflect new width + status_bar = self.query_one(StatusBar) + status_bar.update_status() + + # Refresh history 
display with new responsive truncation + self.refresh_history_display() + + except Exception: + pass # Silently handle resize errors + + def apply_responsive_layout(self) -> None: + """Apply responsive layout adjustments based on terminal size.""" + try: + terminal_width = self.size.width if hasattr(self, "size") else 80 + terminal_height = self.size.height if hasattr(self, "size") else 24 + sidebar = self.query_one(Sidebar) + + # Responsive sidebar width based on terminal width + if terminal_width >= 120: + sidebar.styles.width = 35 + elif terminal_width >= 100: + sidebar.styles.width = 30 + elif terminal_width >= 80: + sidebar.styles.width = 25 + elif terminal_width >= 60: + sidebar.styles.width = 20 + else: + sidebar.styles.width = 15 + + # Auto-hide sidebar on very narrow terminals + if terminal_width < 50: + if sidebar.display: + sidebar.display = False + self.add_system_message( + "💡 Sidebar auto-hidden for narrow terminal. Press Ctrl+2 to toggle." + ) + + # Adjust input area height for very short terminals + if terminal_height < 20: + input_area = self.query_one(InputArea) + input_area.styles.height = 7 + else: + input_area = self.query_one(InputArea) + input_area.styles.height = 9 + + except Exception: + pass + + def start_message_renderer_sync(self): + """Synchronous wrapper to start message renderer via run_worker.""" + self.run_worker(self.start_message_renderer(), exclusive=False) + + async def preload_agent_on_startup(self) -> None: + """Preload the agent/model at startup so loading status is visible.""" + try: + # Show loading in status bar and spinner + self.start_agent_progress("Loading") + + # Warm up agent/model without blocking UI + import asyncio + + from code_puppy.agents.agent_manager import get_current_agent + + agent = get_current_agent() + + # Run the synchronous reload in a worker thread + await asyncio.to_thread(agent.reload_code_generation_agent) + + # After load, refresh current model (in case of fallback or changes) + from code_puppy.config import get_global_model_name + + self.current_model = get_global_model_name() + + # Let the user know model/agent are ready + self.add_system_message("Model and agent preloaded. 
Ready to roll 🛼") + except Exception as e: + # Surface any preload issues but keep app usable + self.add_error_message(f"Startup preload failed: {e}") + finally: + # Always stop spinner and set ready state + self.stop_agent_progress() + + async def start_message_renderer(self): + """Start the message renderer to consume messages from the queue.""" + if not self._renderer_started: + self._renderer_started = True + + # Process any buffered startup messages first + from io import StringIO + + from rich.console import Console + + from code_puppy.messaging import get_buffered_startup_messages + + buffered_messages = get_buffered_startup_messages() + + if buffered_messages: + # Group startup messages into a single display + startup_content_lines = [] + + for message in buffered_messages: + try: + # Convert message content to string for grouping + if hasattr(message.content, "__rich_console__"): + # For Rich objects, render to plain text + string_io = StringIO() + # Use markup=False to prevent interpretation of square brackets as markup + temp_console = Console( + file=string_io, + width=80, + legacy_windows=False, + markup=False, + ) + temp_console.print(message.content) + content_str = string_io.getvalue().rstrip("\n") + else: + content_str = str(message.content) + + startup_content_lines.append(content_str) + except Exception as e: + startup_content_lines.append( + f"Error processing startup message: {e}" + ) + + # Create a single grouped startup message (tightened) + grouped_content = "\n".join(startup_content_lines) + self.add_system_message(self._tighten_text(grouped_content)) + + # Clear the startup buffer after processing + self.message_queue.clear_startup_buffer() + + # Now start the regular message renderer + await self.message_renderer.start() + + async def maybe_prompt_restore_autosave(self) -> None: + """Offer to restore an autosave session at startup (TUI version).""" + try: + import asyncio + from pathlib import Path + + from code_puppy.config import AUTOSAVE_DIR, set_current_autosave_from_session_name + from code_puppy.session_storage import list_sessions, load_session + + base_dir = Path(AUTOSAVE_DIR) + sessions = list_sessions(base_dir) + if not sessions: + return + + # Show modal picker for selection + from .screens.autosave_picker import AutosavePicker + + async def handle_result(result_name: str | None): + if not result_name: + return + try: + # Load history and set into agent + from code_puppy.agents.agent_manager import get_current_agent + + history = load_session(result_name, base_dir) + agent = get_current_agent() + agent.set_message_history(history) + + # Set current autosave session id so subsequent autosaves overwrite this session + try: + set_current_autosave_from_session_name(result_name) + except Exception: + pass + + # Update token info/status bar + total_tokens = sum( + agent.estimate_tokens_for_message(msg) for msg in history + ) + try: + status_bar = self.query_one(StatusBar) + status_bar.update_token_info( + total_tokens, + agent.get_model_context_length(), + total_tokens / max(1, agent.get_model_context_length()), + ) + except Exception: + pass + + # Notify + session_path = base_dir / f"{result_name}.pkl" + self.add_system_message( + f"✅ Autosave loaded: {len(history)} messages ({total_tokens} tokens)\n" + f"📁 From: {session_path}" + ) + + # Refresh history sidebar + self.refresh_history_display() + except Exception as e: + self.add_error_message(f"Failed to load autosave: {e}") + + # Push modal and await result + picker = AutosavePicker(base_dir) + + # Use 
Textual's push_screen with a result callback + def on_picker_result(result_name=None): + # Schedule async handler to avoid blocking UI + import asyncio + self.run_worker(handle_result(result_name), exclusive=False) + + self.push_screen(picker, on_picker_result) + except Exception as e: + # Fail silently but show debug in chat + self.add_system_message(f"[dim]Autosave prompt error: {e}[/dim]") + + async def stop_message_renderer(self): + """Stop the message renderer.""" + if self._renderer_started: + self._renderer_started = False + try: + await self.message_renderer.stop() + except Exception as e: + # Log renderer stop errors but don't crash + self.add_system_message(f"Renderer stop error: {e}") + + @on(HistoryEntrySelected) + def on_history_entry_selected(self, event: HistoryEntrySelected) -> None: + """Handle selection of a history entry from the sidebar.""" + # Display the history entry details + self.show_history_details(event.history_entry) + + @on(CommandSelected) + def on_command_selected(self, event: CommandSelected) -> None: + """Handle selection of a command from the history modal.""" + # Set the command in the input field + input_field = self.query_one("#input-field", CustomTextArea) + input_field.text = event.command + + # Focus the input field for immediate editing + input_field.focus() + + # Close the sidebar automatically for a smoother workflow + sidebar = self.query_one(Sidebar) + sidebar.display = False + + async def on_unmount(self): + """Clean up when the app is unmounted.""" + try: + # Unregister the agent reload callback + from code_puppy.callbacks import unregister_callback + + unregister_callback("agent_reload", self._on_agent_reload) + + await self.stop_message_renderer() + except Exception as e: + # Log unmount errors but don't crash during cleanup + try: + self.add_system_message(f"Unmount cleanup error: {e}") + except Exception: + # If we can't even add a message, just ignore + pass + + +async def run_textual_ui(initial_command: str = None): + """Run the Textual UI interface.""" + # Always enable YOLO mode in TUI mode for a smoother experience + from code_puppy.config import set_config_value + + # Initialize the command history file + initialize_command_history_file() + + set_config_value("yolo_mode", "true") + + app = CodePuppyTUI(initial_command=initial_command) + await app.run_async() diff --git a/code_puppy/tui/components/__init__.py b/code_puppy/tui/components/__init__.py new file mode 100644 index 00000000..96b21996 --- /dev/null +++ b/code_puppy/tui/components/__init__.py @@ -0,0 +1,21 @@ +""" +TUI components package. +""" + +from .chat_view import ChatView +from .copy_button import CopyButton +from .custom_widgets import CustomTextArea +from .input_area import InputArea, SimpleSpinnerWidget, SubmitCancelButton +from .sidebar import Sidebar +from .status_bar import StatusBar + +__all__ = [ + "CustomTextArea", + "StatusBar", + "ChatView", + "CopyButton", + "InputArea", + "SimpleSpinnerWidget", + "SubmitCancelButton", + "Sidebar", +] diff --git a/code_puppy/tui/components/chat_view.py b/code_puppy/tui/components/chat_view.py new file mode 100644 index 00000000..30603675 --- /dev/null +++ b/code_puppy/tui/components/chat_view.py @@ -0,0 +1,551 @@ +""" +Chat view component for displaying conversation history. 
+""" + +import re +from typing import List + +from rich.console import Group +from rich.markdown import Markdown +from rich.syntax import Syntax +from rich.text import Text +from textual import on +from textual.containers import Vertical, VerticalScroll +from textual.widgets import Static + +from ..models import ChatMessage, MessageType +from .copy_button import CopyButton + + +class ChatView(VerticalScroll): + """Main chat interface displaying conversation history.""" + + DEFAULT_CSS = """ + ChatView { + background: $background; + scrollbar-background: $primary; + scrollbar-color: $accent; + margin: 0 0 1 0; + padding: 0; + } + + .user-message { + background: $primary-darken-3; + color: #ffffff; + margin: 0 0 1 0; + margin-top: 0; + padding: 1; + padding-top: 1; + text-wrap: wrap; + border: none; + border-left: thick $accent; + text-style: bold; + } + + .agent-message { + background: transparent; + color: #f3f4f6; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .system-message { + background: transparent; + color: #d1d5db; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-style: italic; + text-wrap: wrap; + border: none; + } + + .error-message { + background: transparent; + color: #fef2f2; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .agent_reasoning-message { + background: transparent; + color: #f3e8ff; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + text-style: italic; + border: none; + } + + .planned_next_steps-message { + background: transparent; + color: #f3e8ff; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + text-style: italic; + border: none; + } + + .agent_response-message { + background: transparent; + color: #f3e8ff; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .info-message { + background: transparent; + color: #d1fae5; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .success-message { + background: #0d9488; + color: #d1fae5; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .warning-message { + background: #d97706; + color: #fef3c7; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .tool_output-message { + background: #5b21b6; + color: #dbeafe; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .command_output-message { + background: #9a3412; + color: #fed7aa; + margin: 0 0 1 0; + margin-top: 0; + padding: 0; + padding-top: 0; + text-wrap: wrap; + border: none; + } + + .message-container { + margin: 0 0 1 0; + padding: 0; + width: 1fr; + } + + .copy-button-container { + margin: 0 0 1 0; + padding: 0 1; + width: 1fr; + height: auto; + align: left top; + } + + /* Ensure first message has no top spacing */ + ChatView > *:first-child { + margin-top: 0; + padding-top: 0; + } + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.messages: List[ChatMessage] = [] + self.message_groups: dict = {} # Track groups for visual grouping + self.group_widgets: dict = {} # Track widgets by group_id for enhanced grouping + self._scroll_pending = False # Track if scroll is already scheduled + + def _render_agent_message_with_syntax(self, 
prefix: str, content: str): + """Render agent message with proper syntax highlighting for code blocks.""" + # Split content by code blocks + parts = re.split(r"(```[\s\S]*?```)", content) + rendered_parts = [] + + # Add prefix as the first part + rendered_parts.append(Text(prefix, style="bold")) + + for i, part in enumerate(parts): + if part.startswith("```") and part.endswith("```"): + # This is a code block + lines = part.strip("`").split("\n") + if lines: + # First line might contain language identifier + language = lines[0].strip() if lines[0].strip() else "text" + code_content = "\n".join(lines[1:]) if len(lines) > 1 else "" + + if code_content.strip(): + # Create syntax highlighted code + try: + syntax = Syntax( + code_content, + language, + theme="github-dark", + background_color="default", + line_numbers=True, + word_wrap=True, + ) + rendered_parts.append(syntax) + except Exception: + # Fallback to plain text if syntax highlighting fails + rendered_parts.append(Text(part)) + else: + rendered_parts.append(Text(part)) + else: + rendered_parts.append(Text(part)) + else: + # Regular text + if part.strip(): + rendered_parts.append(Text(part)) + + return Group(*rendered_parts) + + def _append_to_existing_group(self, message: ChatMessage) -> None: + """Append a message to an existing group by group_id.""" + if message.group_id not in self.group_widgets: + # If group doesn't exist, fall back to normal message creation + return + + # Find the most recent message in this group to append to + group_widgets = self.group_widgets[message.group_id] + if not group_widgets: + return + + # Get the last widget entry for this group + last_entry = group_widgets[-1] + last_message = last_entry["message"] + last_widget = last_entry["widget"] + copy_button = last_entry.get("copy_button") + + # Create a separator for different message types in the same group + if message.type != last_message.type: + separator = "\n" + "─" * 40 + "\n" + else: + separator = "\n" + + # Handle content concatenation carefully to preserve Rich objects + if hasattr(last_message.content, "__rich_console__") or hasattr( + message.content, "__rich_console__" + ): + # If either content is a Rich object, convert both to text and concatenate + from io import StringIO + + from rich.console import Console + + # Convert existing content to string + if hasattr(last_message.content, "__rich_console__"): + string_io = StringIO() + temp_console = Console( + file=string_io, width=80, legacy_windows=False, markup=False + ) + temp_console.print(last_message.content) + existing_content = string_io.getvalue().rstrip("\n") + else: + existing_content = str(last_message.content) + + # Convert new content to string + if hasattr(message.content, "__rich_console__"): + string_io = StringIO() + temp_console = Console( + file=string_io, width=80, legacy_windows=False, markup=False + ) + temp_console.print(message.content) + new_content = string_io.getvalue().rstrip("\n") + else: + new_content = str(message.content) + + # Combine as plain text + last_message.content = existing_content + separator + new_content + else: + # Both are strings, safe to concatenate + last_message.content += separator + message.content + + # Update the widget based on message type + if last_message.type == MessageType.AGENT_RESPONSE: + # Re-render agent response with updated content + prefix = "AGENT RESPONSE:\n" + try: + md = Markdown(last_message.content) + header = Text(prefix, style="bold") + group_content = Group(header, md) + last_widget.update(group_content) + except 
Exception:
                full_content = f"{prefix}{last_message.content}"
                last_widget.update(Text(full_content))

            # Update the copy button if it exists
            if copy_button:
                copy_button.update_text_to_copy(last_message.content)
        else:
            # Handle other message types
            # After the content concatenation above, content is always a string
            # Try to parse markup when safe to do so
            try:
                # Try to parse as markup first - this handles rich styling correctly
                last_widget.update(Text.from_markup(last_message.content))
            except Exception:
                # If markup parsing fails, fall back to plain text
                # This handles cases where content contains literal square brackets
                last_widget.update(Text(last_message.content))

        # Add the new message to our tracking lists
        self.messages.append(message)
        if message.group_id in self.message_groups:
            self.message_groups[message.group_id].append(message)

        # Auto-scroll to bottom with refresh to fix scroll bar issues (debounced)
        self._schedule_scroll()

    def add_message(self, message: ChatMessage) -> None:
        """Add a new message to the chat view."""
        # Enhanced grouping: check if we can append to ANY existing group
        if message.group_id is not None and message.group_id in self.group_widgets:
            self._append_to_existing_group(message)
            return

        # Old logic for consecutive grouping (keeping as fallback)
        if (
            message.group_id is not None
            and self.messages
            and self.messages[-1].group_id == message.group_id
        ):
            # This case should now be handled by _append_to_existing_group above
            # but keeping for safety
            self._append_to_existing_group(message)
            return

        # Add to messages list
        self.messages.append(message)

        # Track groups for potential future use
        if message.group_id:
            if message.group_id not in self.message_groups:
                self.message_groups[message.group_id] = []
            self.message_groups[message.group_id].append(message)

        # Create the message widget
        css_class = f"{message.type.value}-message"

        if message.type == MessageType.USER:
            # Add user indicator and make it stand out
            content_lines = message.content.split("\n")
            if len(content_lines) > 1:
                # Multi-line user message
                formatted_content = f"╔══ USER ══╗\n{message.content}\n╚══════════╝"
            else:
                # Single line user message
                formatted_content = f"▶ USER: {message.content}"

            message_widget = Static(Text(formatted_content), classes=css_class)
            # User messages are not collapsible - mount directly
            self.mount(message_widget)
            # Auto-scroll to bottom
            self._schedule_scroll()
            return
        elif message.type == MessageType.AGENT:
            content = f"{message.content}"
            # Try to render markup; fall back to plain text if parsing fails
            try:
                message_widget = Static(Text.from_markup(content), classes=css_class)
            except Exception:
                message_widget = Static(Text(content), classes=css_class)

        elif message.type == MessageType.SYSTEM:
            # Check if content is a Rich object (like Markdown)
            if hasattr(message.content, "__rich_console__"):
                # Render Rich objects directly (like Markdown)
                message_widget = Static(message.content, classes=css_class)
            else:
                content = f"{message.content}"
                # Try to render markup
                try:
                    message_widget = Static(
                        Text.from_markup(content), classes=css_class
                    )
                except Exception:
                    message_widget = Static(Text(content), classes=css_class)

        elif message.type == MessageType.AGENT_REASONING:
            prefix = "AGENT REASONING:\n"
            content = f"{prefix}{message.content}"
            message_widget = 
Static(Text(content), classes=css_class) + elif message.type == MessageType.PLANNED_NEXT_STEPS: + prefix = "PLANNED NEXT STEPS:\n" + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + elif message.type == MessageType.AGENT_RESPONSE: + prefix = "AGENT RESPONSE:\n" + content = message.content + + try: + # First try to render as markdown with proper syntax highlighting + md = Markdown(content) + # Create a group with the header and markdown content + header = Text(prefix, style="bold") + group_content = Group(header, md) + message_widget = Static(group_content, classes=css_class) + except Exception: + # If markdown parsing fails, fall back to simple text display + full_content = f"{prefix}{content}" + message_widget = Static(Text(full_content), classes=css_class) + + # Try to create copy button - use simpler approach + try: + # Create copy button for agent responses + copy_button = CopyButton(content) # Copy the raw content without prefix + + # Mount the message first + self.mount(message_widget) + + # Then mount the copy button directly + self.mount(copy_button) + + # Track both the widget and copy button for group-based updates + if message.group_id: + if message.group_id not in self.group_widgets: + self.group_widgets[message.group_id] = [] + self.group_widgets[message.group_id].append( + { + "message": message, + "widget": message_widget, + "copy_button": copy_button, + } + ) + + # Auto-scroll to bottom with refresh to fix scroll bar issues (debounced) + self._schedule_scroll() + return # Early return only if copy button creation succeeded + + except Exception as e: + # If copy button creation fails, fall back to normal message display + # Log the error but don't let it prevent the message from showing + import sys + + print(f"Warning: Copy button creation failed: {e}", file=sys.stderr) + # Continue to normal message mounting below + elif message.type == MessageType.INFO: + prefix = "INFO: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + elif message.type == MessageType.SUCCESS: + prefix = "SUCCESS: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + elif message.type == MessageType.WARNING: + prefix = "WARNING: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + elif message.type == MessageType.TOOL_OUTPUT: + prefix = "TOOL OUTPUT: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + elif message.type == MessageType.COMMAND_OUTPUT: + prefix = "COMMAND: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + else: # ERROR and fallback + prefix = "Error: " if message.type == MessageType.ERROR else "Unknown: " + content = f"{prefix}{message.content}" + message_widget = Static(Text(content), classes=css_class) + + self.mount(message_widget) + + # Track the widget for group-based updates + if message.group_id: + if message.group_id not in self.group_widgets: + self.group_widgets[message.group_id] = [] + self.group_widgets[message.group_id].append( + { + "message": message, + "widget": message_widget, + "copy_button": None, # Will be set if created + } + ) + + # Auto-scroll to bottom with refresh to fix scroll bar issues (debounced) + self._schedule_scroll() + + def clear_messages(self) -> None: + """Clear all messages from the chat view.""" + self.messages.clear() + 
self.message_groups.clear() # Clear groups too + self.group_widgets.clear() # Clear widget tracking too + # Remove all message widgets (Static widgets, CopyButtons, and any Vertical containers) + for widget in self.query(Static): + widget.remove() + for widget in self.query(CopyButton): + widget.remove() + for widget in self.query(Vertical): + widget.remove() + + @on(CopyButton.CopyCompleted) + def on_copy_completed(self, event: CopyButton.CopyCompleted) -> None: + """Handle copy button completion events.""" + if event.success: + # Could add a temporary success message or visual feedback + # For now, the button itself provides visual feedback + pass + else: + # Show error message in chat if copy failed + from datetime import datetime, timezone + + error_message = ChatMessage( + id=f"copy_error_{datetime.now(timezone.utc).timestamp()}", + type=MessageType.ERROR, + content=f"Failed to copy to clipboard: {event.error}", + timestamp=datetime.now(timezone.utc), + ) + self.add_message(error_message) + + def _schedule_scroll(self) -> None: + """Schedule a scroll operation, avoiding duplicate calls.""" + if not self._scroll_pending: + self._scroll_pending = True + self.call_after_refresh(self._do_scroll) + + def _do_scroll(self) -> None: + """Perform the actual scroll operation.""" + self._scroll_pending = False + self.scroll_end(animate=False) diff --git a/code_puppy/tui/components/command_history_modal.py b/code_puppy/tui/components/command_history_modal.py new file mode 100644 index 00000000..ebf15759 --- /dev/null +++ b/code_puppy/tui/components/command_history_modal.py @@ -0,0 +1,218 @@ +""" +Modal component for displaying command history entries. +""" + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, Horizontal +from textual.events import Key +from textual.screen import ModalScreen +from textual.widgets import Button, Label, Static + +from ..messages import CommandSelected + + +class CommandHistoryModal(ModalScreen): + """Modal for displaying a command history entry.""" + + def __init__(self, **kwargs): + """Initialize the modal with command history data. 
+ + Args: + **kwargs: Additional arguments to pass to the parent class + """ + super().__init__(**kwargs) + + # Get the current command from the sidebar + try: + # We'll get everything from the sidebar on demand + self.sidebar = None + self.command = "" + self.timestamp = "" + except Exception: + self.command = "" + self.timestamp = "" + + # UI components to update + self.command_display = None + self.timestamp_display = None + + def on_mount(self) -> None: + """Setup when the modal is mounted.""" + # Get the sidebar and current command entry + try: + self.sidebar = self.app.query_one("Sidebar") + current_entry = self.sidebar.get_current_command_entry() + self.command = current_entry["command"] + self.timestamp = current_entry["timestamp"] + self.update_display() + except Exception as e: + import logging + + logging.debug(f"Error initializing modal: {str(e)}") + + DEFAULT_CSS = """ + CommandHistoryModal { + align: center middle; + } + + #modal-container { + width: 80%; + max-width: 100; + /* Set a definite height that's large enough but fits on screen */ + height: 22; /* Increased height to make room for navigation hint */ + min-height: 18; + background: $surface; + border: solid $primary; + /* Increase vertical padding to add more space between elements */ + padding: 1 2; + /* Use vertical layout to ensure proper element sizing */ + layout: vertical; + } + + #timestamp-display { + width: 100%; + margin-bottom: 1; + color: $text-muted; + text-align: right; + /* Fix the height */ + height: 1; + margin-top: 0; + } + + #command-display { + width: 100%; + /* Allow this container to grow/shrink as needed but keep buttons visible */ + min-height: 3; + height: 1fr; + max-height: 12; + padding: 0 1; + margin-bottom: 1; + margin-top: 1; + background: $surface-darken-1; + border: solid $primary-darken-2; + overflow: auto; + } + + #nav-hint { + width: 100%; + color: $text; + text-align: center; + margin: 1 0; + } + + .button-container { + width: 100%; + /* Fix the height to ensure buttons are always visible */ + height: 3; + align-horizontal: right; + margin-top: 1; + } + + Button { + margin-right: 1; + } + + #use-button { + background: $success; + } + + #cancel-button { + background: $primary-darken-1; + } + """ + + def compose(self) -> ComposeResult: + """Create the modal layout.""" + with Container(id="modal-container"): + # Header with timestamp + self.timestamp_display = Label( + f"Timestamp: {self.timestamp}", id="timestamp-display" + ) + yield self.timestamp_display + + # Scrollable content area that can expand/contract as needed + # The content will scroll if it's too long, ensuring buttons remain visible + with Container(id="command-display"): + self.command_display = Static(self.command) + yield self.command_display + + # Super simple navigation hint + yield Label("Press Up/Down arrows to navigate history", id="nav-hint") + + # Fixed button container at the bottom + with Horizontal(classes="button-container"): + yield Button("Cancel", id="cancel-button", variant="default") + yield Button("Use Command", id="use-button", variant="primary") + + def on_key(self, event: Key) -> None: + """Handle key events for navigation.""" + # Handle arrow keys for navigation + if event.key == "down": + self.navigate_to_next_command() + event.prevent_default() + elif event.key == "up": + self.navigate_to_previous_command() + event.prevent_default() + elif event.key == "escape": + self.app.pop_screen() + event.prevent_default() + + def navigate_to_next_command(self) -> None: + """Navigate to the next command in 
history.""" + try: + # Get the sidebar + if not self.sidebar: + self.sidebar = self.app.query_one("Sidebar") + + # Use sidebar's method to navigate + if self.sidebar.navigate_to_next_command(): + # Get updated command entry + current_entry = self.sidebar.get_current_command_entry() + self.command = current_entry["command"] + self.timestamp = current_entry["timestamp"] + self.update_display() + except Exception as e: + # Log the error but don't crash + import logging + + logging.debug(f"Error navigating to next command: {str(e)}") + + def navigate_to_previous_command(self) -> None: + """Navigate to the previous command in history.""" + try: + # Get the sidebar + if not self.sidebar: + self.sidebar = self.app.query_one("Sidebar") + + # Use sidebar's method to navigate + if self.sidebar.navigate_to_previous_command(): + # Get updated command entry + current_entry = self.sidebar.get_current_command_entry() + self.command = current_entry["command"] + self.timestamp = current_entry["timestamp"] + self.update_display() + except Exception as e: + # Log the error but don't crash + import logging + + logging.debug(f"Error navigating to previous command: {str(e)}") + + def update_display(self) -> None: + """Update the display with the current command and timestamp.""" + if self.command_display: + self.command_display.update(self.command) + if self.timestamp_display: + self.timestamp_display.update(f"Timestamp: {self.timestamp}") + + @on(Button.Pressed, "#use-button") + def use_command(self) -> None: + """Handle use button press.""" + # Post a message to the app with the selected command + self.post_message(CommandSelected(self.command)) + self.app.pop_screen() + + @on(Button.Pressed, "#cancel-button") + def cancel(self) -> None: + """Handle cancel button press.""" + self.app.pop_screen() diff --git a/code_puppy/tui/components/copy_button.py b/code_puppy/tui/components/copy_button.py new file mode 100644 index 00000000..54395ecf --- /dev/null +++ b/code_puppy/tui/components/copy_button.py @@ -0,0 +1,139 @@ +""" +Copy button component for copying agent responses to clipboard. +""" + +import subprocess +import sys +from typing import Optional + +from textual.binding import Binding +from textual.events import Click +from textual.message import Message +from textual.widgets import Button + + +class CopyButton(Button): + """A button that copies associated text to the clipboard.""" + + DEFAULT_CSS = """ + CopyButton { + width: auto; + height: 3; + min-width: 8; + margin: 0 1 1 1; + padding: 0 1; + background: $primary; + color: $text; + border: none; + text-align: center; + } + + CopyButton:hover { + background: $accent; + color: $text; + } + + CopyButton:focus { + background: $accent; + color: $text; + text-style: bold; + } + + CopyButton.-pressed { + background: $success; + color: $text; + } + """ + + BINDINGS = [ + Binding("enter", "press", "Copy", show=False), + Binding("space", "press", "Copy", show=False), + ] + + def __init__(self, text_to_copy: str, **kwargs): + super().__init__("📋 Copy", **kwargs) + self.text_to_copy = text_to_copy + self._original_label = "📋 Copy" + self._copied_label = "✅ Copied!" + + class CopyCompleted(Message): + """Message sent when text is successfully copied.""" + + def __init__(self, success: bool, error: Optional[str] = None): + super().__init__() + self.success = success + self.error = error + + def copy_to_clipboard(self, text: str) -> tuple[bool, Optional[str]]: + """ + Copy text to clipboard using platform-appropriate method. 
+ + Returns: + tuple: (success: bool, error_message: Optional[str]) + """ + try: + if sys.platform == "darwin": # macOS + subprocess.run( + ["pbcopy"], input=text, text=True, check=True, capture_output=True + ) + elif sys.platform == "win32": # Windows + subprocess.run( + ["clip"], input=text, text=True, check=True, capture_output=True + ) + else: # Linux and other Unix-like systems + # Try xclip first, then xsel as fallback + try: + subprocess.run( + ["xclip", "-selection", "clipboard"], + input=text, + text=True, + check=True, + capture_output=True, + ) + except (subprocess.CalledProcessError, FileNotFoundError): + # Fallback to xsel + subprocess.run( + ["xsel", "--clipboard", "--input"], + input=text, + text=True, + check=True, + capture_output=True, + ) + + return True, None + + except subprocess.CalledProcessError as e: + return False, f"Clipboard command failed: {e}" + except FileNotFoundError: + if sys.platform not in ["darwin", "win32"]: + return ( + False, + "Clipboard utilities not found. Please install xclip or xsel.", + ) + else: + return False, "System clipboard command not found." + except Exception as e: + return False, f"Unexpected error: {e}" + + def on_click(self, event: Click) -> None: + """Handle button click to copy text.""" + self.action_press() + + def action_press(self) -> None: + """Copy the text to clipboard and provide visual feedback.""" + success, error = self.copy_to_clipboard(self.text_to_copy) + + if success: + # Visual feedback - change button text temporarily + self.label = self._copied_label + self.add_class("-pressed") + + # Reset button appearance after a short delay + # self.set_timer(1.5, self._reset_button_appearance) + + # Send message about copy operation + self.post_message(self.CopyCompleted(success, error)) + + def update_text_to_copy(self, new_text: str) -> None: + """Update the text that will be copied when button is pressed.""" + self.text_to_copy = new_text diff --git a/code_puppy/tui/components/custom_widgets.py b/code_puppy/tui/components/custom_widgets.py new file mode 100644 index 00000000..c3752f26 --- /dev/null +++ b/code_puppy/tui/components/custom_widgets.py @@ -0,0 +1,63 @@ +""" +Custom widget components for the TUI. 
+""" + +from textual.binding import Binding +from textual.events import Key +from textual.message import Message +from textual.widgets import TextArea + + +class CustomTextArea(TextArea): + """Custom TextArea that sends a message with Enter and allows new lines with Shift+Enter.""" + + # Define key bindings + BINDINGS = [ + Binding("alt+enter", "insert_newline", ""), + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def on_key(self, event): + """Handle key events before they reach the internal _on_key handler.""" + # Let the binding system handle alt+enter + if event.key == "alt+enter": + # Don't prevent default - let the binding system handle it + return + + # Handle escape+enter manually + if event.key == "escape+enter": + self.action_insert_newline() + event.prevent_default() + event.stop() + return + + def _on_key(self, event: Key) -> None: + """Override internal key handler to intercept Enter keys.""" + # Handle Enter key specifically + if event.key == "enter": + # Check if this key is part of an escape sequence (Alt+Enter) + if hasattr(event, "is_cursor_sequence") or ( + hasattr(event, "meta") and event.meta + ): + # If it's part of an escape sequence, let the parent handle it + # so that bindings can process it + super()._on_key(event) + return + + # This handles plain Enter only, not escape+enter + self.post_message(self.MessageSent()) + return # Don't call super() to prevent default newline behavior + + # Let TextArea handle other keys + super()._on_key(event) + + def action_insert_newline(self) -> None: + """Action to insert a new line - called by shift+enter and escape+enter bindings.""" + self.insert("\n") + + class MessageSent(Message): + """Message sent when Enter key is pressed (without Shift).""" + + pass diff --git a/code_puppy/tui/components/human_input_modal.py b/code_puppy/tui/components/human_input_modal.py new file mode 100644 index 00000000..c03e4878 --- /dev/null +++ b/code_puppy/tui/components/human_input_modal.py @@ -0,0 +1,175 @@ +""" +Modal component for human input requests. +""" + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, Horizontal +from textual.events import Key +from textual.screen import ModalScreen +from textual.widgets import Button, Static, TextArea + +try: + from .custom_widgets import CustomTextArea +except ImportError: + # Fallback to regular TextArea if CustomTextArea isn't available + CustomTextArea = TextArea + + +class HumanInputModal(ModalScreen): + """Modal for requesting human input.""" + + def __init__(self, prompt_text: str, prompt_id: str, **kwargs): + """Initialize the modal with prompt information. 
+ + Args: + prompt_text: The prompt to display to the user + prompt_id: Unique identifier for this prompt request + **kwargs: Additional arguments to pass to the parent class + """ + super().__init__(**kwargs) + self.prompt_text = prompt_text + self.prompt_id = prompt_id + self.response = "" + print(f"[DEBUG] Created HumanInputModal for prompt_id: {prompt_id}") + + DEFAULT_CSS = """ + HumanInputModal { + align: center middle; + } + + #modal-container { + width: 80%; + max-width: 80; + height: 16; + min-height: 12; + background: $surface; + border: solid $primary; + padding: 1 2; + layout: vertical; + } + + #prompt-display { + width: 100%; + margin-bottom: 1; + color: $text; + text-align: left; + height: auto; + max-height: 6; + overflow: auto; + } + + #input-container { + width: 100%; + height: 4; + margin-bottom: 1; + } + + #response-input { + width: 100%; + height: 4; + border: solid $primary; + background: $surface-darken-1; + } + + #button-container { + width: 100%; + height: 3; + align: center bottom; + layout: horizontal; + } + + #submit-button, #cancel-button { + width: auto; + height: 3; + margin: 0 1; + min-width: 10; + } + + #hint-text { + width: 100%; + color: $text-muted; + text-align: center; + height: 1; + margin-top: 1; + } + """ + + def compose(self) -> ComposeResult: + """Create the modal layout.""" + with Container(id="modal-container"): + yield Static(self.prompt_text, id="prompt-display") + with Container(id="input-container"): + yield CustomTextArea("", id="response-input") + with Horizontal(id="button-container"): + yield Button("Submit", id="submit-button", variant="primary") + yield Button("Cancel", id="cancel-button", variant="default") + yield Static("Enter to submit • Escape to cancel", id="hint-text") + + def on_mount(self) -> None: + """Focus the input field when modal opens.""" + try: + print("[DEBUG] Modal on_mount called") + input_field = self.query_one("#response-input", CustomTextArea) + input_field.focus() + print("[DEBUG] Modal input field focused") + except Exception as e: + print(f"[DEBUG] Modal on_mount exception: {e}") + import traceback + + traceback.print_exc() + + @on(Button.Pressed, "#submit-button") + def on_submit_clicked(self) -> None: + """Handle submit button click.""" + self._submit_response() + + @on(Button.Pressed, "#cancel-button") + def on_cancel_clicked(self) -> None: + """Handle cancel button click.""" + self._cancel_response() + + def on_key(self, event: Key) -> None: + """Handle key events.""" + if event.key == "escape": + self._cancel_response() + event.prevent_default() + elif event.key == "enter": + # Check if we're in the text area and it's not multi-line + try: + input_field = self.query_one("#response-input", CustomTextArea) + if input_field.has_focus and "\n" not in input_field.text: + self._submit_response() + event.prevent_default() + except Exception: + pass + + def _submit_response(self) -> None: + """Submit the user's response.""" + try: + input_field = self.query_one("#response-input", CustomTextArea) + self.response = input_field.text.strip() + print(f"[DEBUG] Modal submitting response: {self.response[:20]}...") + + # Provide the response back to the message queue + from code_puppy.messaging import provide_prompt_response + + provide_prompt_response(self.prompt_id, self.response) + + # Close the modal using the same method as other modals + self.app.pop_screen() + except Exception as e: + print(f"[DEBUG] Modal error during submit: {e}") + # If something goes wrong, provide empty response + from code_puppy.messaging 
import provide_prompt_response + + provide_prompt_response(self.prompt_id, "") + self.app.pop_screen() + + def _cancel_response(self) -> None: + """Cancel the input request.""" + print("[DEBUG] Modal cancelling response") + from code_puppy.messaging import provide_prompt_response + + provide_prompt_response(self.prompt_id, "") + self.app.pop_screen() diff --git a/code_puppy/tui/components/input_area.py b/code_puppy/tui/components/input_area.py new file mode 100644 index 00000000..bb7c9d06 --- /dev/null +++ b/code_puppy/tui/components/input_area.py @@ -0,0 +1,167 @@ +""" +Input area component for message input. +""" + +from textual.app import ComposeResult +from textual.containers import Container, Horizontal +from textual.message import Message +from textual.reactive import reactive +from textual.widgets import Button, Static + +from code_puppy.messaging.spinner import TextualSpinner + +from .custom_widgets import CustomTextArea + +# Alias SimpleSpinnerWidget to TextualSpinner for backward compatibility +SimpleSpinnerWidget = TextualSpinner + + +class SubmitCancelButton(Button): + """A button that toggles between submit and cancel states.""" + + is_cancel_mode = reactive(False) + + DEFAULT_CSS = """ + SubmitCancelButton { + width: 3; + min-width: 3; + height: 3; + content-align: center middle; + border: none; + background: $surface; + } + + SubmitCancelButton:focus { + border: none; + color: $surface; + background: $surface; + } + + SubmitCancelButton:hover { + border: none; + background: $surface; + } + """ + + def __init__(self, **kwargs): + super().__init__("▶️", **kwargs) + self.id = "submit-cancel-button" + + def watch_is_cancel_mode(self, is_cancel: bool) -> None: + """Update the button label when cancel mode changes.""" + self.label = "⏹️" if is_cancel else "▶️" + + def on_click(self) -> None: + """Handle click event and bubble it up to parent.""" + # When clicked, send a ButtonClicked message that will be handled by the parent + self.post_message(self.Clicked(self)) + + class Clicked(Message): + """Button was clicked.""" + + def __init__(self, button: "SubmitCancelButton") -> None: + self.is_cancel_mode = button.is_cancel_mode + super().__init__() + + +class InputArea(Container): + """Input area with text input, spinner, help text, and send button.""" + + DEFAULT_CSS = """ + InputArea { + dock: bottom; + height: 9; + margin: 1; + } + + #spinner { + height: 1; + width: 1fr; + margin: 0 3 0 1; + content-align: left middle; + text-align: left; + display: none; + } + + #spinner.visible { + display: block; + } + + #input-container { + height: 5; + width: 1fr; + margin: 1 3 0 1; + align: center middle; + } + + #input-field { + height: 5; + width: 1fr; + border: round $primary; + background: $surface; + } + + #submit-cancel-button { + height: 3; + width: 3; + min-width: 3; + margin: 1 0 1 1; + content-align: center middle; + border: none; + background: $surface; + } + + #input-help { + height: 1; + width: 1fr; + margin: 0 3 1 1; + color: $text-muted; + text-align: center; + } + """ + + def on_mount(self) -> None: + """Initialize the button state based on the app's agent_busy state.""" + app = self.app + if hasattr(app, "agent_busy"): + button = self.query_one(SubmitCancelButton) + button.is_cancel_mode = app.agent_busy + + def compose(self) -> ComposeResult: + yield SimpleSpinnerWidget(id="spinner") + with Horizontal(id="input-container"): + yield CustomTextArea(id="input-field", show_line_numbers=False) + yield SubmitCancelButton() + yield Static( + "Enter to send • Shift+Enter for new 
line • Ctrl+1 for help", + id="input-help", + ) + + def on_submit_cancel_button_clicked( + self, event: SubmitCancelButton.Clicked + ) -> None: + """Handle button clicks based on current mode.""" + if event.is_cancel_mode: + # Cancel mode - stop the current process + self.post_message(self.CancelRequested()) + else: + # Submit mode - send the message + self.post_message(self.SubmitRequested()) + + # Return focus to the input field + self.app.call_after_refresh(self.focus_input_field) + + def focus_input_field(self) -> None: + """Focus the input field after button click.""" + input_field = self.query_one("#input-field") + input_field.focus() + + class SubmitRequested(Message): + """Request to submit the current input.""" + + pass + + class CancelRequested(Message): + """Request to cancel the current process.""" + + pass diff --git a/code_puppy/tui/components/sidebar.py b/code_puppy/tui/components/sidebar.py new file mode 100644 index 00000000..c6b12f08 --- /dev/null +++ b/code_puppy/tui/components/sidebar.py @@ -0,0 +1,309 @@ +""" +Sidebar component with history tab. +""" + +import time + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container +from textual.events import Key +from textual.widgets import Label, ListItem, ListView, TabbedContent, TabPane + +from ..components.command_history_modal import CommandHistoryModal + +# Import the shared message class and history reader +from ..models.command_history import HistoryFileReader + + +class Sidebar(Container): + """Sidebar with session history.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + # Double-click detection variables + self._last_click_time = 0 + self._last_clicked_item = None + self._double_click_threshold = 0.5 # 500ms for double-click + + # Initialize history reader + self.history_reader = HistoryFileReader() + + # Current index for history navigation - centralized reference + self.current_history_index = 0 + self.history_entries = [] + + DEFAULT_CSS = """ + Sidebar { + dock: left; + width: 30; + min-width: 20; + max-width: 50; + background: $surface; + border-right: solid $primary; + display: none; + } + + #sidebar-tabs { + height: 1fr; + } + + #history-list { + height: 1fr; + } + + .history-interactive { + color: #34d399; + } + + .history-tui { + color: #60a5fa; + } + + .history-system { + color: #fbbf24; + text-style: italic; + } + + .history-command { + /* Use default text color from theme */ + } + + .history-generic { + color: #d1d5db; + } + + .history-empty { + color: #6b7280; + text-style: italic; + } + + .history-error { + color: #ef4444; + } + + .file-item { + color: #d1d5db; + } + """ + + def compose(self) -> ComposeResult: + """Create the sidebar layout with tabs.""" + with TabbedContent(id="sidebar-tabs"): + with TabPane("📜 History", id="history-tab"): + yield ListView(id="history-list") + + def on_mount(self) -> None: + """Initialize the sidebar when mounted.""" + # Set up event handlers for keyboard interaction + history_list = self.query_one("#history-list", ListView) + + # Add a class to make it focusable + history_list.can_focus = True + + # Load command history + self.load_command_history() + + @on(ListView.Highlighted) + def on_list_highlighted(self, event: ListView.Highlighted) -> None: + """Handle highlighting of list items to ensure they can be selected.""" + # This ensures the item gets focus when highlighted by arrow keys + if event.list_view.id == "history-list": + event.list_view.focus() + # Sync the current_history_index with the 
ListView index to fix modal sync issue + self.current_history_index = event.list_view.index + + @on(ListView.Selected) + def on_list_selected(self, event: ListView.Selected) -> None: + """Handle selection of list items (including mouse clicks). + + Implements double-click detection to allow users to retrieve history items + by either pressing ENTER or double-clicking with the mouse. + """ + if event.list_view.id == "history-list": + current_time = time.time() + selected_item = event.item + + # Check if this is a double-click + if ( + selected_item == self._last_clicked_item + and current_time - self._last_click_time <= self._double_click_threshold + and hasattr(selected_item, "command_entry") + ): + # Double-click detected! Show command in modal + # Find the index of this item + history_list = self.query_one("#history-list", ListView) + self.current_history_index = history_list.index + + # Push the modal screen - it will get data from the sidebar + self.app.push_screen(CommandHistoryModal()) + + # Reset click tracking to prevent triple-click issues + self._last_click_time = 0 + self._last_clicked_item = None + else: + # Single click - just update tracking + self._last_click_time = current_time + self._last_clicked_item = selected_item + + @on(Key) + def on_key(self, event: Key) -> None: + """Handle key events for the sidebar.""" + # Handle Enter key on the history list + if event.key == "enter": + history_list = self.query_one("#history-list", ListView) + if ( + history_list.has_focus + and history_list.highlighted_child + and hasattr(history_list.highlighted_child, "command_entry") + ): + # Show command details in modal + # Update the current history index to match this item + self.current_history_index = history_list.index + + # Push the modal screen - it will get data from the sidebar + self.app.push_screen(CommandHistoryModal()) + + # Stop propagation + event.stop() + event.prevent_default() + + def load_command_history(self) -> None: + """Load command history from file into the history list.""" + try: + # Clear existing items + history_list = self.query_one("#history-list", ListView) + history_list.clear() + + # Get command history entries (limit to last 50) + entries = self.history_reader.read_history(max_entries=50) + + # Filter out CLI-specific commands that aren't relevant for TUI + cli_commands = { + "/help", + "/exit", + "/m", + "/motd", + "/show", + "/set", + "/tools", + } + filtered_entries = [] + for entry in entries: + command = entry.get("command", "").strip() + # Skip CLI commands but keep everything else + if not any(command.startswith(cli_cmd) for cli_cmd in cli_commands): + filtered_entries.append(entry) + + # Store filtered entries centrally + self.history_entries = filtered_entries + + # Reset history index + self.current_history_index = 0 + + if not filtered_entries: + # No history available (after filtering) + history_list.append( + ListItem(Label("No command history", classes="history-empty")) + ) + return + + # Add filtered entries to the list (most recent first) + for entry in filtered_entries: + timestamp = entry["timestamp"] + command = entry["command"] + + # Format timestamp for display + time_display = self.history_reader.format_timestamp(timestamp) + + # Truncate command for display if needed + display_text = command + if len(display_text) > 60: + display_text = display_text[:57] + "..." 
+ + # Create list item + label = Label( + f"[{time_display}] {display_text}", classes="history-command" + ) + list_item = ListItem(label) + list_item.command_entry = entry + history_list.append(list_item) + + # Focus on the most recent command (first in the list) + if len(history_list.children) > 0: + history_list.index = 0 + # Sync the current_history_index to match the ListView index + self.current_history_index = 0 + + # Note: We don't automatically show the modal here when just loading the history + # That will be handled by the app's action_toggle_sidebar method + # This ensures the modal only appears when explicitly opening the sidebar, not during refresh + + except Exception as e: + # Add error item + history_list = self.query_one("#history-list", ListView) + history_list.clear() + history_list.append( + ListItem( + Label(f"Error loading history: {str(e)}", classes="history-error") + ) + ) + + def navigate_to_next_command(self) -> bool: + """Navigate to the next command in history. + + Returns: + bool: True if navigation succeeded, False otherwise + """ + if ( + not self.history_entries + or self.current_history_index >= len(self.history_entries) - 1 + ): + return False + + # Increment the index + self.current_history_index += 1 + + # Update the listview selection + try: + history_list = self.query_one("#history-list", ListView) + if history_list and self.current_history_index < len(history_list.children): + history_list.index = self.current_history_index + except Exception: + pass + + return True + + def navigate_to_previous_command(self) -> bool: + """Navigate to the previous command in history. + + Returns: + bool: True if navigation succeeded, False otherwise + """ + if not self.history_entries or self.current_history_index <= 0: + return False + + # Decrement the index + self.current_history_index -= 1 + + # Update the listview selection + try: + history_list = self.query_one("#history-list", ListView) + if history_list and self.current_history_index >= 0: + history_list.index = self.current_history_index + except Exception: + pass + + return True + + def get_current_command_entry(self) -> dict: + """Get the current command entry based on the current index. + + Returns: + dict: The current command entry or empty dict if not available + """ + if self.history_entries and 0 <= self.current_history_index < len( + self.history_entries + ): + return self.history_entries[self.current_history_index] + return {"command": "", "timestamp": ""} diff --git a/code_puppy/tui/components/status_bar.py b/code_puppy/tui/components/status_bar.py new file mode 100644 index 00000000..c277464b --- /dev/null +++ b/code_puppy/tui/components/status_bar.py @@ -0,0 +1,185 @@ +""" +Status bar component for the TUI. 
+""" + +import os + +from rich.text import Text +from textual.app import ComposeResult +from textual.reactive import reactive +from textual.widgets import Static + + +class StatusBar(Static): + """Status bar showing current model, puppy name, and connection status.""" + + DEFAULT_CSS = """ + StatusBar { + dock: top; + height: 1; + background: $primary; + color: $text; + text-align: right; + padding: 0 1; + } + + #status-content { + text-align: right; + width: 100%; + } + """ + + current_model = reactive("") + puppy_name = reactive("") + connection_status = reactive("Connected") + agent_status = reactive("Ready") + progress_visible = reactive(False) + token_count = reactive(0) + token_capacity = reactive(0) + token_proportion = reactive(0.0) + + def compose(self) -> ComposeResult: + yield Static(id="status-content") + + def watch_current_model(self) -> None: + self.update_status() + + def watch_puppy_name(self) -> None: + self.update_status() + + def watch_connection_status(self) -> None: + self.update_status() + + def watch_agent_status(self) -> None: + self.update_status() + + def watch_token_count(self) -> None: + self.update_status() + + def watch_token_capacity(self) -> None: + self.update_status() + + def watch_token_proportion(self) -> None: + self.update_status() + + def watch_progress_visible(self) -> None: + self.update_status() + + def update_status(self) -> None: + """Update the status bar content with responsive design.""" + status_widget = self.query_one("#status-content", Static) + + # Get current working directory + cwd = os.getcwd() + cwd_short = os.path.basename(cwd) if cwd != "/" else "/" + + # Add agent status indicator with different colors + if self.agent_status == "Thinking": + status_indicator = "🤔" + status_color = "yellow" + elif self.agent_status == "Processing": + status_indicator = "⚡" + status_color = "blue" + elif self.agent_status == "Busy": + status_indicator = "🔄" + status_color = "orange" + elif self.agent_status == "Loading": + status_indicator = "⏳" + status_color = "cyan" + else: # Ready or anything else + status_indicator = "✅" + status_color = "green" + + # Get terminal width for responsive content + try: + terminal_width = self.app.size.width if hasattr(self.app, "size") else 80 + except Exception: + terminal_width = 80 + + # Create responsive status text based on terminal width + rich_text = Text() + + # Token status with color coding + token_status = "" + token_color = "green" + if self.token_count > 0 and self.token_capacity > 0: + # Import here to avoid circular import + from code_puppy.config import get_compaction_threshold + + get_compaction_threshold = get_compaction_threshold() + + if self.token_proportion > get_compaction_threshold: + token_color = "red" + token_status = f"🔴 {self.token_count}/{self.token_capacity} ({self.token_proportion:.1%})" + elif self.token_proportion > ( + get_compaction_threshold - 0.15 + ): # 15% before summarization threshold + token_color = "yellow" + token_status = f"🟡 {self.token_count}/{self.token_capacity} ({self.token_proportion:.1%})" + else: + token_color = "green" + token_status = f"🟢 {self.token_count}/{self.token_capacity} ({self.token_proportion:.1%})" + + if terminal_width >= 140: + # Extra wide - show full path and all info including tokens + rich_text.append( + f"📁 {cwd} | 🐶 {self.puppy_name} | Model: {self.current_model} | " + ) + if token_status: + rich_text.append(f"{token_status} | ", style=token_color) + rich_text.append( + f"{status_indicator} {self.agent_status}", style=status_color + ) + elif 
terminal_width >= 100: + # Full status display for wide terminals + rich_text.append( + f"📁 {cwd_short} | 🐶 {self.puppy_name} | Model: {self.current_model} | " + ) + rich_text.append( + f"{status_indicator} {self.agent_status}", style=status_color + ) + elif terminal_width >= 120: + # Medium display - shorten model name if needed + model_display = ( + self.current_model[:15] + "..." + if len(self.current_model) > 18 + else self.current_model + ) + rich_text.append( + f"📁 {cwd_short} | 🐶 {self.puppy_name} | {model_display} | " + ) + if token_status: + rich_text.append(f"{token_status} | ", style=token_color) + rich_text.append( + f"{status_indicator} {self.agent_status}", style=status_color + ) + elif terminal_width >= 60: + # Compact display - use abbreviations + puppy_short = ( + self.puppy_name[:8] + "..." + if len(self.puppy_name) > 10 + else self.puppy_name + ) + model_short = ( + self.current_model[:12] + "..." + if len(self.current_model) > 15 + else self.current_model + ) + rich_text.append(f"📁 {cwd_short} | 🐶 {puppy_short} | {model_short} | ") + rich_text.append(f"{status_indicator}", style=status_color) + else: + # Minimal display for very narrow terminals + cwd_mini = cwd_short[:8] + "..." if len(cwd_short) > 10 else cwd_short + rich_text.append(f"📁 {cwd_mini} | ") + rich_text.append(f"{status_indicator}", style=status_color) + + rich_text.justify = "right" + status_widget.update(rich_text) + + def update_token_info( + self, current_tokens: int, max_tokens: int, proportion: float + ) -> None: + """Update token information in the status bar.""" + self.token_count = current_tokens + self.token_capacity = max_tokens + self.token_proportion = proportion diff --git a/code_puppy/tui/messages.py b/code_puppy/tui/messages.py new file mode 100644 index 00000000..962752ad --- /dev/null +++ b/code_puppy/tui/messages.py @@ -0,0 +1,27 @@ +""" +Custom message classes for TUI components. +""" + +from textual.message import Message + + +class HistoryEntrySelected(Message): + """Message sent when a history entry is selected from the sidebar.""" + + def __init__(self, history_entry: dict) -> None: + """Initialize with the history entry data.""" + self.history_entry = history_entry + super().__init__() + + +class CommandSelected(Message): + """Message sent when a command is selected from the history modal.""" + + def __init__(self, command: str) -> None: + """Initialize with the command text. + + Args: + command: The command text that was selected + """ + self.command = command + super().__init__() diff --git a/code_puppy/tui/models/__init__.py b/code_puppy/tui/models/__init__.py new file mode 100644 index 00000000..22948775 --- /dev/null +++ b/code_puppy/tui/models/__init__.py @@ -0,0 +1,8 @@ +""" +TUI models package. +""" + +from .chat_message import ChatMessage +from .enums import MessageType + +__all__ = ["MessageType", "ChatMessage"] diff --git a/code_puppy/tui/models/chat_message.py b/code_puppy/tui/models/chat_message.py new file mode 100644 index 00000000..35534800 --- /dev/null +++ b/code_puppy/tui/models/chat_message.py @@ -0,0 +1,25 @@ +""" +Chat message data model. 
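+Defines the ChatMessage dataclass: id, type, content, timestamp, plus optional metadata and group_id.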
+""" + +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Dict + +from .enums import MessageType + + +@dataclass +class ChatMessage: + """Represents a message in the chat interface.""" + + id: str + type: MessageType + content: str + timestamp: datetime + metadata: Dict[str, Any] = None + group_id: str = None + + def __post_init__(self): + if self.metadata is None: + self.metadata = {} diff --git a/code_puppy/tui/models/command_history.py b/code_puppy/tui/models/command_history.py new file mode 100644 index 00000000..f8948d64 --- /dev/null +++ b/code_puppy/tui/models/command_history.py @@ -0,0 +1,89 @@ +""" +Command history reader for TUI history tab. +""" + +import os +import re +from datetime import datetime +from typing import Dict, List + +from code_puppy.config import COMMAND_HISTORY_FILE + + +class HistoryFileReader: + """Reads and parses the command history file for display in the TUI history tab.""" + + def __init__(self, history_file_path: str = COMMAND_HISTORY_FILE): + """Initialize the history file reader. + + Args: + history_file_path: Path to the command history file. Defaults to the standard location. + """ + self.history_file_path = history_file_path + self._timestamp_pattern = re.compile( + r"^# (\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})" + ) + + def read_history(self, max_entries: int = 100) -> List[Dict[str, str]]: + """Read command history from the history file. + + Args: + max_entries: Maximum number of entries to read. Defaults to 100. + + Returns: + List of history entries with timestamp and command, most recent first. + """ + if not os.path.exists(self.history_file_path): + return [] + + try: + with open(self.history_file_path, "r") as f: + content = f.read() + + # Split content by timestamp marker + raw_chunks = re.split(r"(# \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})", content) + + # Filter out empty chunks + chunks = [chunk for chunk in raw_chunks if chunk.strip()] + + entries = [] + + # Process chunks in pairs (timestamp and command) + i = 0 + while i < len(chunks) - 1: + if self._timestamp_pattern.match(chunks[i]): + timestamp = self._timestamp_pattern.match(chunks[i]).group(1) + command_text = chunks[i + 1].strip() + + if command_text: # Skip empty commands + entries.append( + {"timestamp": timestamp, "command": command_text} + ) + + i += 2 + else: + # Skip invalid chunks + i += 1 + + # Limit the number of entries and reverse to get most recent first + return entries[-max_entries:][::-1] + + except Exception: + # Return empty list on any error + return [] + + def format_timestamp(self, timestamp: str, format_str: str = "%H:%M:%S") -> str: + """Format a timestamp string for display. + + Args: + timestamp: ISO format timestamp string (YYYY-MM-DDThh:mm:ss) + format_str: Format string for datetime.strftime + + Returns: + Formatted timestamp string + """ + try: + dt = datetime.fromisoformat(timestamp) + return dt.strftime(format_str) + except (ValueError, TypeError): + return timestamp diff --git a/code_puppy/tui/models/enums.py b/code_puppy/tui/models/enums.py new file mode 100644 index 00000000..1a2185ce --- /dev/null +++ b/code_puppy/tui/models/enums.py @@ -0,0 +1,24 @@ +""" +Enums for the TUI module. 
+""" + +from enum import Enum + + +class MessageType(Enum): + """Types of messages in the chat interface.""" + + USER = "user" + AGENT = "agent" + SYSTEM = "system" + ERROR = "error" + DIVIDER = "divider" + INFO = "info" + SUCCESS = "success" + WARNING = "warning" + TOOL_OUTPUT = "tool_output" + COMMAND_OUTPUT = "command_output" + + AGENT_REASONING = "agent_reasoning" + PLANNED_NEXT_STEPS = "planned_next_steps" + AGENT_RESPONSE = "agent_response" diff --git a/code_puppy/tui/screens/__init__.py b/code_puppy/tui/screens/__init__.py new file mode 100644 index 00000000..a477a9ea --- /dev/null +++ b/code_puppy/tui/screens/__init__.py @@ -0,0 +1,17 @@ +""" +TUI screens package. +""" + +from .help import HelpScreen +from .mcp_install_wizard import MCPInstallWizardScreen +from .settings import SettingsScreen +from .tools import ToolsScreen +from .autosave_picker import AutosavePicker + +__all__ = [ + "HelpScreen", + "SettingsScreen", + "ToolsScreen", + "MCPInstallWizardScreen", + "AutosavePicker", +] diff --git a/code_puppy/tui/screens/autosave_picker.py b/code_puppy/tui/screens/autosave_picker.py new file mode 100644 index 00000000..49e2e923 --- /dev/null +++ b/code_puppy/tui/screens/autosave_picker.py @@ -0,0 +1,166 @@ +""" +Autosave Picker modal for TUI. +Lists recent autosave sessions and lets the user load one. +""" +from __future__ import annotations + +import json +from dataclasses import dataclass +from datetime import datetime +from pathlib import Path +from typing import List, Optional, Tuple + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, Horizontal +from textual.screen import ModalScreen +from textual.widgets import Button, Label, ListItem, ListView, Static + +from code_puppy.session_storage import list_sessions + + +@dataclass(slots=True) +class AutosaveEntry: + name: str + timestamp: Optional[str] + message_count: Optional[int] + + +def _load_metadata(base_dir: Path, name: str) -> Tuple[Optional[str], Optional[int]]: + meta_path = base_dir / f"{name}_meta.json" + try: + with meta_path.open("r", encoding="utf-8") as meta_file: + data = json.load(meta_file) + return data.get("timestamp"), data.get("message_count") + except Exception: + return None, None + + +class AutosavePicker(ModalScreen): + """Modal to present available autosave sessions for selection.""" + + DEFAULT_CSS = """ + AutosavePicker { + align: center middle; + } + + #modal-container { + width: 80%; + max-width: 100; + height: 24; + min-height: 18; + background: $surface; + border: solid $primary; + padding: 1 2; + layout: vertical; + } + + #list-label { + width: 100%; + height: 1; + color: $text; + text-align: left; + } + + #autosave-list { + height: 1fr; + overflow: auto; + border: solid $primary-darken-2; + background: $surface-darken-1; + margin: 1 0; + } + + .button-row { + height: 3; + align-horizontal: right; + margin-top: 1; + } + + #cancel-button { background: $primary-darken-1; } + #load-button { background: $success; } + """ + + def __init__(self, autosave_dir: Path, **kwargs): + super().__init__(**kwargs) + self.autosave_dir = autosave_dir + self.entries: List[AutosaveEntry] = [] + self.list_view: Optional[ListView] = None + + def on_mount(self) -> None: + names = list_sessions(self.autosave_dir) + raw_entries: List[Tuple[str, Optional[str], Optional[int]]] = [] + for name in names: + ts, count = _load_metadata(self.autosave_dir, name) + raw_entries.append((name, ts, count)) + + def sort_key(entry): + _, ts, _ = entry + if ts: + try: + return 
datetime.fromisoformat(ts) + except ValueError: + return datetime.min + return datetime.min + + raw_entries.sort(key=sort_key, reverse=True) + self.entries = [AutosaveEntry(*e) for e in raw_entries] + + # Populate the ListView now that entries are ready + if self.list_view is None: + try: + self.list_view = self.query_one("#autosave-list", ListView) + except Exception: + self.list_view = None + + if self.list_view is not None: + # Clear existing items if any + try: + self.list_view.clear() + except Exception: + # Fallback: remove children manually + self.list_view.children.clear() # type: ignore + + for entry in self.entries[:50]: + ts = entry.timestamp or "unknown time" + count = f"{entry.message_count} msgs" if entry.message_count is not None else "unknown size" + label = f"{entry.name} — {count}, saved at {ts}" + self.list_view.append(ListItem(Static(label))) + + # Focus and select first item for better UX + if len(self.entries) > 0: + self.list_view.index = 0 + self.list_view.focus() + + def compose(self) -> ComposeResult: + with Container(id="modal-container"): + yield Label("Select an autosave to load (Esc to cancel)", id="list-label") + self.list_view = ListView(id="autosave-list") + # populate items + for entry in self.entries[:50]: # cap to avoid long lists + ts = entry.timestamp or "unknown time" + count = f"{entry.message_count} msgs" if entry.message_count is not None else "unknown size" + label = f"{entry.name} — {count}, saved at {ts}" + self.list_view.append(ListItem(Static(label))) + yield self.list_view + with Horizontal(classes="button-row"): + yield Button("Cancel", id="cancel-button") + yield Button("Load", id="load-button", variant="primary") + + @on(Button.Pressed, "#cancel-button") + def cancel(self) -> None: + self.dismiss(None) + + @on(Button.Pressed, "#load-button") + def load_selected(self) -> None: + if not self.list_view or not self.entries: + self.dismiss(None) + return + idx = self.list_view.index if self.list_view.index is not None else 0 + if 0 <= idx < len(self.entries): + self.dismiss(self.entries[idx].name) + else: + self.dismiss(None) + + def on_list_view_selected(self, event: ListView.Selected) -> None: # type: ignore + # Double-enter may select; we just map to load button + self.load_selected() diff --git a/code_puppy/tui/screens/help.py b/code_puppy/tui/screens/help.py new file mode 100644 index 00000000..03ef517e --- /dev/null +++ b/code_puppy/tui/screens/help.py @@ -0,0 +1,130 @@ +""" +Help modal screen. 
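+Displays input controls, keyboard shortcuts, and slash commands, switching to a compact layout on narrow terminals.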
+""" + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, VerticalScroll +from textual.screen import ModalScreen +from textual.widgets import Button, Static + + +class HelpScreen(ModalScreen): + """Help modal screen.""" + + DEFAULT_CSS = """ + HelpScreen { + align: center middle; + } + + #help-dialog { + width: 80; + height: 30; + border: thick $primary; + background: $surface; + padding: 1; + } + + #help-content { + height: 1fr; + margin: 0 0 1 0; + overflow-y: auto; + } + + #help-buttons { + layout: horizontal; + height: 3; + align: center middle; + } + + #dismiss-button { + margin: 0 1; + } + """ + + def compose(self) -> ComposeResult: + with Container(id="help-dialog"): + yield Static("📚 Code Puppy TUI Help", id="help-title") + with VerticalScroll(id="help-content"): + yield Static(self.get_help_content(), id="help-text") + with Container(id="help-buttons"): + yield Button("Dismiss", id="dismiss-button", variant="primary") + + def get_help_content(self) -> str: + """Get the help content text.""" + try: + # Get terminal width for responsive help + terminal_width = self.app.size.width if hasattr(self.app, "size") else 80 + except Exception: + terminal_width = 80 + + if terminal_width < 60: + # Compact help for narrow terminals + return """ +Code Puppy TUI (Compact Mode): + +Controls: +- Enter: Send message +- Ctrl+Enter: New line +- Ctrl+Q: Quit +- Ctrl+2: Toggle History +- Ctrl+3: Settings +- Ctrl+4: Tools +- Ctrl+5: Focus prompt +- Ctrl+6: Focus response + +Use this help for full details. +""" + else: + # Full help text + return """ +Code Puppy TUI Help: + +Input Controls: +- Enter: Send message +- ALT+Enter: New line (multi-line input) +- Standard text editing shortcuts supported + +Keyboard Shortcuts: +- Ctrl+Q/Ctrl+C: Quit application +- Ctrl+L: Clear chat history +- Ctrl+1: Show this help +- Ctrl+2: Toggle History +- Ctrl+3: Open settings +- Ctrl+4: Tools +- Ctrl+5: Focus prompt (input field) +- Ctrl+6: Focus response (chat area) + +Chat Navigation: +- Ctrl+Up/Down: Scroll chat up/down +- Ctrl+Home: Scroll to top +- Ctrl+End: Scroll to bottom + +Commands: +- /clear: Clear chat history +- /m : Switch model +- /cd : Change directory +- /help: Show help +- /status: Show current status + +Use the input area at the bottom to type messages. +Press Ctrl+2 to view History when needed. +Agent responses support syntax highlighting for code blocks. +Press Ctrl+3 to access all configuration settings. + +Copy Feature: +- 📋 Copy buttons appear after agent responses +- Click or press Enter/Space on copy button to copy content +- Raw markdown content is copied to clipboard +- Visual feedback shows copy success/failure +""" + + @on(Button.Pressed, "#dismiss-button") + def dismiss_help(self) -> None: + """Dismiss the help modal.""" + self.dismiss() + + def on_key(self, event) -> None: + """Handle key events.""" + if event.key == "escape": + self.dismiss() diff --git a/code_puppy/tui/screens/mcp_install_wizard.py b/code_puppy/tui/screens/mcp_install_wizard.py new file mode 100644 index 00000000..aae3aca9 --- /dev/null +++ b/code_puppy/tui/screens/mcp_install_wizard.py @@ -0,0 +1,803 @@ +""" +MCP Install Wizard Screen - TUI interface for installing MCP servers. 
+""" + +import json +import os + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, Horizontal +from textual.screen import ModalScreen +from textual.widgets import Button, Input, ListItem, ListView, Static, TextArea + + +class MCPInstallWizardScreen(ModalScreen): + """Modal screen for installing MCP servers with full wizard support.""" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.selected_server = None + self.env_vars = {} + self.step = "search" # search -> configure -> install -> custom_json + self.search_counter = 0 # Counter to ensure unique IDs + self.custom_json_mode = False # Track if we're in custom JSON mode + + DEFAULT_CSS = """ + MCPInstallWizardScreen { + align: center middle; + } + + #wizard-container { + width: 90%; + max-width: 100; + height: 80%; + max-height: 40; + background: $surface; + border: solid $primary; + padding: 1 2; + layout: vertical; + } + + #wizard-header { + width: 100%; + height: 3; + text-align: center; + color: $accent; + margin-bottom: 1; + } + + #search-container { + width: 100%; + height: auto; + layout: vertical; + } + + #search-input { + width: 100%; + margin-bottom: 1; + border: solid $primary; + } + + #results-list { + width: 100%; + height: 20; + border: solid $primary; + margin-bottom: 1; + } + + #config-container { + width: 100%; + height: 1fr; + layout: vertical; + } + + #server-info { + width: 100%; + height: auto; + max-height: 8; + border: solid $success; + padding: 1; + margin-bottom: 1; + background: $surface-lighten-1; + } + + #env-vars-container { + width: 100%; + height: 1fr; + layout: vertical; + border: solid $warning; + padding: 1; + margin-bottom: 1; + overflow-y: scroll; + } + + #env-var-input { + width: 100%; + margin-bottom: 1; + border: solid $primary; + } + + #button-container { + width: 100%; + height: 4; + layout: horizontal; + align: center bottom; + } + + #back-button, #next-button, #install-button, #cancel-button { + width: auto; + height: 3; + margin: 0 1; + min-width: 12; + } + + .env-var-row { + width: 100%; + layout: horizontal; + height: 3; + margin-bottom: 1; + } + + .env-var-label { + width: 1fr; + padding: 1 0; + } + + .env-var-input { + width: 2fr; + border: solid $primary; + } + + #custom-json-container { + width: 100%; + height: 1fr; + layout: vertical; + display: none; + padding: 1; + } + + #custom-json-header { + width: 100%; + height: 2; + text-align: left; + color: $warning; + margin-bottom: 1; + } + + #custom-name-input { + width: 100%; + margin-bottom: 1; + border: solid $primary; + } + + #custom-json-input { + width: 100%; + height: 1fr; + border: solid $primary; + margin-bottom: 1; + background: $surface-darken-1; + } + + #custom-json-button { + width: auto; + height: 3; + margin: 0 1; + min-width: 14; + } + """ + + def compose(self) -> ComposeResult: + """Create the wizard layout.""" + with Container(id="wizard-container"): + yield Static("🔌 MCP Server Install Wizard", id="wizard-header") + + # Step 1: Search and select server + with Container(id="search-container"): + yield Input( + placeholder="Search MCP servers (e.g. 
'github', 'postgres')...", + id="search-input", + ) + yield ListView(id="results-list") + + # Step 2: Configure server (hidden initially) + with Container(id="config-container"): + yield Static("Server Configuration", id="config-header") + yield Container(id="server-info") + yield Container(id="env-vars-container") + + # Step 3: Custom JSON configuration (hidden initially) + with Container(id="custom-json-container"): + yield Static("📝 Custom JSON Configuration", id="custom-json-header") + yield Input( + placeholder="Server name (e.g. 'my-sqlite-db')", + id="custom-name-input", + ) + yield TextArea(id="custom-json-input") + + # Navigation buttons + with Horizontal(id="button-container"): + yield Button("Cancel", id="cancel-button", variant="default") + yield Button("Back", id="back-button", variant="default") + yield Button("Custom JSON", id="custom-json-button", variant="warning") + yield Button("Next", id="next-button", variant="primary") + yield Button("Install", id="install-button", variant="success") + + def on_mount(self) -> None: + """Initialize the wizard.""" + self._show_search_step() + self._load_popular_servers() + + # Focus the search input + search_input = self.query_one("#search-input", Input) + search_input.focus() + + def _show_search_step(self) -> None: + """Show the search step.""" + self.step = "search" + self.custom_json_mode = False + self.query_one("#search-container").display = True + self.query_one("#config-container").display = False + self.query_one("#custom-json-container").display = False + + self.query_one("#back-button").display = False + self.query_one("#custom-json-button").display = True + self.query_one("#next-button").display = True + self.query_one("#install-button").display = False + + def _show_config_step(self) -> None: + """Show the configuration step.""" + self.step = "configure" + self.custom_json_mode = False + self.query_one("#search-container").display = False + self.query_one("#config-container").display = True + self.query_one("#custom-json-container").display = False + + self.query_one("#back-button").display = True + self.query_one("#custom-json-button").display = False + self.query_one("#next-button").display = False + self.query_one("#install-button").display = True + + self._setup_server_config() + + def _show_custom_json_step(self) -> None: + """Show the custom JSON configuration step.""" + self.step = "custom_json" + self.custom_json_mode = True + self.query_one("#search-container").display = False + self.query_one("#config-container").display = False + self.query_one("#custom-json-container").display = True + + self.query_one("#back-button").display = True + self.query_one("#custom-json-button").display = False + self.query_one("#next-button").display = False + self.query_one("#install-button").display = True + + # Pre-populate with SQLite example + name_input = self.query_one("#custom-name-input", Input) + name_input.value = "my-sqlite-db" + + json_input = self.query_one("#custom-json-input", TextArea) + json_input.text = """{ + "type": "stdio", + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-sqlite", "./database.db"], + "timeout": 30 +}""" + + # Focus the name input + name_input.focus() + + def _load_popular_servers(self) -> None: + """Load all available servers into the list.""" + self.search_counter += 1 + counter = self.search_counter + + try: + from code_puppy.mcp_.server_registry_catalog import catalog + + # Load ALL servers instead of just popular ones + servers = catalog.servers + + results_list = 
self.query_one("#results-list", ListView) + # Force clear by removing all children + results_list.remove_children() + + if servers: + # Sort servers to show popular and verified first + sorted_servers = sorted( + servers, + key=lambda s: (not s.popular, not s.verified, s.display_name), + ) + + for i, server in enumerate(sorted_servers): + indicators = [] + if server.verified: + indicators.append("✓") + if server.popular: + indicators.append("⭐") + + display_name = f"{server.display_name} {''.join(indicators)}" + description = ( + server.description[:60] + "..." + if len(server.description) > 60 + else server.description + ) + + item_text = f"{display_name}\n[dim]{description}[/dim]" + # Use counter to ensure globally unique IDs + item = ListItem(Static(item_text), id=f"item-{counter}-{i}") + item.server_data = server + results_list.append(item) + else: + no_servers_item = ListItem( + Static("No servers found"), id=f"no-results-{counter}" + ) + results_list.append(no_servers_item) + + except ImportError: + results_list = self.query_one("#results-list", ListView) + results_list.remove_children() + error_item = ListItem( + Static("[red]Server registry not available[/red]"), + id=f"error-{counter}", + ) + results_list.append(error_item) + + @on(Input.Changed, "#search-input") + def on_search_changed(self, event: Input.Changed) -> None: + """Handle search input changes.""" + query = event.value.strip() + + if not query: + self._load_popular_servers() # This now loads all servers + return + + self.search_counter += 1 + counter = self.search_counter + + try: + from code_puppy.mcp_.server_registry_catalog import catalog + + servers = catalog.search(query) + + results_list = self.query_one("#results-list", ListView) + # Force clear by removing all children + results_list.remove_children() + + if servers: + for i, server in enumerate(servers[:15]): # Limit results + indicators = [] + if server.verified: + indicators.append("✓") + if server.popular: + indicators.append("⭐") + + display_name = f"{server.display_name} {''.join(indicators)}" + description = ( + server.description[:60] + "..." 
+ if len(server.description) > 60 + else server.description + ) + + item_text = f"{display_name}\n[dim]{description}[/dim]" + # Use counter to ensure globally unique IDs + item = ListItem(Static(item_text), id=f"item-{counter}-{i}") + item.server_data = server + results_list.append(item) + else: + no_results_item = ListItem( + Static(f"No servers found for '{query}'"), + id=f"no-results-{counter}", + ) + results_list.append(no_results_item) + + except ImportError: + results_list = self.query_one("#results-list", ListView) + results_list.remove_children() + error_item = ListItem( + Static("[red]Server registry not available[/red]"), + id=f"error-{counter}", + ) + results_list.append(error_item) + + @on(ListView.Selected, "#results-list") + def on_server_selected(self, event: ListView.Selected) -> None: + """Handle server selection.""" + if hasattr(event.item, "server_data"): + self.selected_server = event.item.server_data + + @on(Button.Pressed, "#next-button") + def on_next_clicked(self) -> None: + """Handle next button click.""" + if self.step == "search": + if self.selected_server: + self._show_config_step() + else: + # Show error - no server selected + pass + + @on(Button.Pressed, "#back-button") + def on_back_clicked(self) -> None: + """Handle back button click.""" + if self.step == "configure": + self._show_search_step() + elif self.step == "custom_json": + self._show_search_step() + + @on(Button.Pressed, "#custom-json-button") + def on_custom_json_clicked(self) -> None: + """Handle custom JSON button click.""" + self._show_custom_json_step() + + @on(Button.Pressed, "#install-button") + def on_install_clicked(self) -> None: + """Handle install button click.""" + if self.step == "configure" and self.selected_server: + self._install_server() + elif self.step == "custom_json": + self._install_custom_json() + + @on(Button.Pressed, "#cancel-button") + def on_cancel_clicked(self) -> None: + """Handle cancel button click.""" + self.dismiss({"success": False, "message": "Installation cancelled"}) + + def _setup_server_config(self) -> None: + """Setup the server configuration step.""" + if not self.selected_server: + return + + # Show server info + server_info = self.query_one("#server-info", Container) + server_info.remove_children() + + info_text = f"""[bold]{self.selected_server.display_name}[/bold] +{self.selected_server.description} + +[yellow]Category:[/yellow] {self.selected_server.category} +[yellow]Type:[/yellow] {getattr(self.selected_server, "type", "stdio")}""" + + # Show requirements summary + requirements = self.selected_server.get_requirements() + req_items = [] + if requirements.required_tools: + req_items.append(f"Tools: {', '.join(requirements.required_tools)}") + if requirements.environment_vars: + req_items.append(f"Env vars: {len(requirements.environment_vars)}") + if requirements.command_line_args: + req_items.append(f"Config args: {len(requirements.command_line_args)}") + + if req_items: + info_text += f"\n[yellow]Requirements:[/yellow] {' | '.join(req_items)}" + + server_info.mount(Static(info_text)) + + # Setup configuration requirements + config_container = self.query_one("#env-vars-container", Container) + config_container.remove_children() + config_container.mount(Static("[bold]Server Configuration:[/bold]")) + + # Add server name input + config_container.mount(Static("\n[bold blue]Server Name:[/bold blue]")) + name_row = Horizontal(classes="env-var-row") + config_container.mount(name_row) + name_row.mount(Static("🏷️ Custom name:", classes="env-var-label")) + 
name_input = Input( + placeholder=f"Default: {self.selected_server.name}", + value=self.selected_server.name, + classes="env-var-input", + id="server-name-input", + ) + name_row.mount(name_input) + + try: + # Check system requirements first + self._setup_system_requirements(config_container) + + # Setup environment variables + self._setup_environment_variables(config_container) + + # Setup command line arguments + self._setup_command_line_args(config_container) + + # Show package dependencies info + self._setup_package_dependencies(config_container) + + except Exception as e: + config_container.mount( + Static(f"[red]Error loading configuration: {e}[/red]") + ) + + def _setup_system_requirements(self, parent: Container) -> None: + """Setup system requirements validation.""" + required_tools = self.selected_server.get_required_tools() + + if not required_tools: + return + + parent.mount(Static("\n[bold cyan]System Tools:[/bold cyan]")) + + # Import here to avoid circular imports + from code_puppy.mcp_.system_tools import detector + + tool_status = detector.detect_tools(required_tools) + + for tool_name, tool_info in tool_status.items(): + if tool_info.available: + status_text = f"✅ {tool_name}" + if tool_info.version: + status_text += f" ({tool_info.version})" + parent.mount(Static(status_text)) + else: + status_text = f"❌ {tool_name} - {tool_info.error or 'Not found'}" + parent.mount(Static(f"[red]{status_text}[/red]")) + + # Show installation suggestions + suggestions = detector.get_installation_suggestions(tool_name) + if suggestions: + parent.mount(Static(f"[dim] Install: {suggestions[0]}[/dim]")) + + def _setup_environment_variables(self, parent: Container) -> None: + """Setup environment variables inputs.""" + env_vars = self.selected_server.get_environment_vars() + + if not env_vars: + return + + parent.mount(Static("\n[bold yellow]Environment Variables:[/bold yellow]")) + + for var in env_vars: + # Check if already set + import os + + current_value = os.environ.get(var, "") + + row_container = Horizontal(classes="env-var-row") + parent.mount(row_container) + + status_indicator = "✅" if current_value else "📝" + row_container.mount( + Static(f"{status_indicator} {var}:", classes="env-var-label") + ) + + env_input = Input( + placeholder=f"Enter {var} value..." 
+ if not current_value + else "Already set", + value=current_value, + classes="env-var-input", + id=f"env-{var}", + ) + row_container.mount(env_input) + + def _setup_command_line_args(self, parent: Container) -> None: + """Setup command line arguments inputs.""" + cmd_args = self.selected_server.get_command_line_args() + + if not cmd_args: + return + + parent.mount(Static("\n[bold green]Command Line Arguments:[/bold green]")) + + for arg_config in cmd_args: + name = arg_config.get("name", "") + prompt = arg_config.get("prompt", name) + default = arg_config.get("default", "") + required = arg_config.get("required", True) + + row_container = Horizontal(classes="env-var-row") + parent.mount(row_container) + + indicator = "⚡" if required else "🔧" + label_text = f"{indicator} {prompt}:" + if not required: + label_text += " (optional)" + + row_container.mount(Static(label_text, classes="env-var-label")) + + arg_input = Input( + placeholder=f"Default: {default}" if default else f"Enter {name}...", + value=default, + classes="env-var-input", + id=f"arg-{name}", + ) + row_container.mount(arg_input) + + def _setup_package_dependencies(self, parent: Container) -> None: + """Setup package dependencies information.""" + packages = self.selected_server.get_package_dependencies() + + if not packages: + return + + parent.mount(Static("\n[bold magenta]Package Dependencies:[/bold magenta]")) + + # Import here to avoid circular imports + from code_puppy.mcp_.system_tools import detector + + package_status = detector.check_package_dependencies(packages) + + for package, available in package_status.items(): + if available: + parent.mount(Static(f"✅ {package} (installed)")) + else: + parent.mount( + Static( + f"[yellow]📦 {package} (will be installed automatically)[/yellow]" + ) + ) + + def _install_server(self) -> None: + """Install the selected server with configuration.""" + if not self.selected_server: + return + + try: + # Collect configuration inputs + env_vars = {} + cmd_args = {} + server_name = self.selected_server.name # Default fallback + + all_inputs = self.query(Input) + + for input_widget in all_inputs: + if input_widget.id == "server-name-input": + custom_name = input_widget.value.strip() + if custom_name: + server_name = custom_name + elif input_widget.id and input_widget.id.startswith("env-"): + var_name = input_widget.id[4:] # Remove "env-" prefix + value = input_widget.value.strip() + if value: + env_vars[var_name] = value + elif input_widget.id and input_widget.id.startswith("arg-"): + arg_name = input_widget.id[4:] # Remove "arg-" prefix + value = input_widget.value.strip() + if value: + cmd_args[arg_name] = value + + # Set environment variables in the current environment + for var, value in env_vars.items(): + os.environ[var] = value + + # Get server config with command line argument overrides + config_dict = self.selected_server.to_server_config(server_name, **cmd_args) + + # Update the config with actual environment variable values + if "env" in config_dict: + for env_key, env_value in config_dict["env"].items(): + # If it's a placeholder like $GITHUB_TOKEN, replace with actual value + if env_value.startswith("$"): + var_name = env_value[1:] # Remove the $ + if var_name in env_vars: + config_dict["env"][env_key] = env_vars[var_name] + + # Create and register the server + from code_puppy.mcp_ import ServerConfig + from code_puppy.mcp_.manager import get_mcp_manager + + server_config = ServerConfig( + id=server_name, + name=server_name, + type=config_dict.pop("type"), + enabled=True, + 
config=config_dict, + ) + + manager = get_mcp_manager() + server_id = manager.register_server(server_config) + + if server_id: + # Save to mcp_servers.json + from code_puppy.config import MCP_SERVERS_FILE + + if os.path.exists(MCP_SERVERS_FILE): + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + else: + servers = {} + data = {"mcp_servers": servers} + + servers[server_name] = config_dict + servers[server_name]["type"] = server_config.type + + os.makedirs(os.path.dirname(MCP_SERVERS_FILE), exist_ok=True) + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + + # Reload MCP servers + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + + self.dismiss( + { + "success": True, + "message": f"Successfully installed '{server_name}' from {self.selected_server.display_name}", + "server_name": server_name, + } + ) + else: + self.dismiss({"success": False, "message": "Failed to register server"}) + + except Exception as e: + self.dismiss( + {"success": False, "message": f"Installation failed: {str(e)}"} + ) + + def _install_custom_json(self) -> None: + """Install server from custom JSON configuration.""" + try: + name_input = self.query_one("#custom-name-input", Input) + json_input = self.query_one("#custom-json-input", TextArea) + + server_name = name_input.value.strip() + json_text = json_input.text.strip() + + if not server_name: + # Show error - need a name + return + + if not json_text: + # Show error - need JSON config + return + + # Parse JSON + try: + config_dict = json.loads(json_text) + except json.JSONDecodeError: + # Show error - invalid JSON + return + + # Validate required fields + if "type" not in config_dict: + # Show error - missing type + return + + # Extract type and create server config + server_type = config_dict.pop("type") + + # Create and register the server + from code_puppy.mcp_ import ServerConfig + from code_puppy.mcp_.manager import get_mcp_manager + + server_config = ServerConfig( + id=server_name, + name=server_name, + type=server_type, + enabled=True, + config=config_dict, + ) + + manager = get_mcp_manager() + server_id = manager.register_server(server_config) + + if server_id: + # Save to mcp_servers.json + from code_puppy.config import MCP_SERVERS_FILE + + if os.path.exists(MCP_SERVERS_FILE): + with open(MCP_SERVERS_FILE, "r") as f: + data = json.load(f) + servers = data.get("mcp_servers", {}) + else: + servers = {} + data = {"mcp_servers": servers} + + # Add the full config including type + full_config = config_dict.copy() + full_config["type"] = server_type + servers[server_name] = full_config + + os.makedirs(os.path.dirname(MCP_SERVERS_FILE), exist_ok=True) + with open(MCP_SERVERS_FILE, "w") as f: + json.dump(data, f, indent=2) + + # Reload MCP servers + from code_puppy.agent import reload_mcp_servers + + reload_mcp_servers() + + self.dismiss( + { + "success": True, + "message": f"Successfully installed custom server '{server_name}'", + "server_name": server_name, + } + ) + else: + self.dismiss( + {"success": False, "message": "Failed to register custom server"} + ) + + except Exception as e: + self.dismiss( + {"success": False, "message": f"Installation failed: {str(e)}"} + ) + + def on_key(self, event) -> None: + """Handle key events.""" + if event.key == "escape": + self.on_cancel_clicked() diff --git a/code_puppy/tui/screens/settings.py b/code_puppy/tui/screens/settings.py new file mode 100644 index 00000000..aaffa737 --- /dev/null +++ 
b/code_puppy/tui/screens/settings.py @@ -0,0 +1,306 @@ +""" +Settings modal screen. +""" + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, VerticalScroll +from textual.screen import ModalScreen +from textual.widgets import Button, Input, Select, Static + + +class SettingsScreen(ModalScreen): + """Settings configuration screen.""" + + DEFAULT_CSS = """ + SettingsScreen { + align: center middle; + } + + #settings-dialog { + width: 80; + height: 33; + border: thick $primary; + background: $surface; + padding: 1; + } + + #settings-form { + height: 1fr; + overflow: auto; + } + + .setting-row { + layout: horizontal; + height: 3; + margin: 0 0 1 0; + } + + .setting-label { + width: 20; + text-align: right; + padding: 1 1 0 0; + } + + .setting-input { + width: 1fr; + margin: 0 0 0 1; + } + + /* Additional styling for static input values */ + #yolo-static { + padding: 1 0 0 0; /* Align text vertically with other inputs */ + color: $success; /* Use success color to emphasize it's enabled */ + } + + #settings-buttons { + layout: horizontal; + height: 3; + align: center middle; + } + + #save-button, #cancel-button { + margin: 0 1; + } + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.settings_data = {} + + def compose(self) -> ComposeResult: + with Container(id="settings-dialog"): + yield Static("⚙️ Settings Configuration", id="settings-title") + # Make the form scrollable so long content fits + with VerticalScroll(id="settings-form"): + with Container(classes="setting-row"): + yield Static("Puppy Name:", classes="setting-label") + yield Input(id="puppy-name-input", classes="setting-input") + + with Container(classes="setting-row"): + yield Static("Owner Name:", classes="setting-label") + yield Input(id="owner-name-input", classes="setting-input") + + with Container(classes="setting-row"): + yield Static("Model:", classes="setting-label") + yield Select([], id="model-select", classes="setting-input") + + with Container(classes="setting-row"): + yield Static("YOLO Mode:", classes="setting-label") + yield Static( + "✅ Enabled (always on in TUI)", + id="yolo-static", + classes="setting-input", + ) + + with Container(classes="setting-row"): + yield Static("Protected Tokens:", classes="setting-label") + yield Input( + id="protected-tokens-input", + classes="setting-input", + placeholder="e.g., 50000", + ) + + with Container(classes="setting-row"): + yield Static("Compaction Strategy:", classes="setting-label") + yield Select( + [ + ("Summarization", "summarization"), + ("Truncation", "truncation"), + ], + id="compaction-strategy-select", + classes="setting-input", + ) + + with Container(classes="setting-row"): + yield Static("Compaction Threshold:", classes="setting-label") + yield Input( + id="compaction-threshold-input", + classes="setting-input", + placeholder="e.g., 0.85", + ) + + with Container(id="settings-buttons"): + yield Button("Save", id="save-button", variant="primary") + yield Button("Cancel", id="cancel-button") + + def on_mount(self) -> None: + """Load current settings when the screen mounts.""" + from code_puppy.config import ( + get_compaction_strategy, + get_compaction_threshold, + get_global_model_name, + get_owner_name, + get_protected_token_count, + get_puppy_name, + ) + + # Load current values + puppy_name_input = self.query_one("#puppy-name-input", Input) + owner_name_input = self.query_one("#owner-name-input", Input) + model_select = self.query_one("#model-select", Select) + protected_tokens_input = 
self.query_one("#protected-tokens-input", Input) + compaction_threshold_input = self.query_one( + "#compaction-threshold-input", Input + ) + compaction_strategy_select = self.query_one( + "#compaction-strategy-select", Select + ) + + puppy_name_input.value = get_puppy_name() or "" + owner_name_input.value = get_owner_name() or "" + protected_tokens_input.value = str(get_protected_token_count()) + compaction_threshold_input.value = str(get_compaction_threshold()) + compaction_strategy_select.value = get_compaction_strategy() + + # Load available models + self.load_model_options(model_select) + + # Set current model selection + current_model = get_global_model_name() + model_select.value = current_model + + # YOLO mode is always enabled in TUI mode + + def load_model_options(self, model_select): + """Load available models into the model select widget.""" + try: + # Use the same method that interactive mode uses to load models + + from code_puppy.model_factory import ModelFactory + + # Load models using the same path and method as interactive mode + models_data = ModelFactory.load_config() + + # Create options as (display_name, model_name) tuples + model_options = [] + for model_name, model_config in models_data.items(): + model_type = model_config.get("type", "unknown") + display_name = f"{model_name} ({model_type})" + model_options.append((display_name, model_name)) + + # Set the options on the select widget + model_select.set_options(model_options) + + except Exception: + # Fallback to a basic option if loading fails + model_select.set_options([("gpt-4.1 (openai)", "gpt-4.1")]) + + @on(Button.Pressed, "#save-button") + def save_settings(self) -> None: + """Save the modified settings.""" + from code_puppy.config import ( + get_model_context_length, + set_config_value, + set_model_name, + ) + + try: + # Get values from inputs + puppy_name = self.query_one("#puppy-name-input", Input).value.strip() + owner_name = self.query_one("#owner-name-input", Input).value.strip() + selected_model = self.query_one("#model-select", Select).value + yolo_mode = "true" # Always set to true in TUI mode + protected_tokens = self.query_one( + "#protected-tokens-input", Input + ).value.strip() + compaction_threshold = self.query_one( + "#compaction-threshold-input", Input + ).value.strip() + + # Validate and save + if puppy_name: + set_config_value("puppy_name", puppy_name) + if owner_name: + set_config_value("owner_name", owner_name) + + # Save model selection + if selected_model: + set_model_name(selected_model) + # Reload the active agent so model switch takes effect immediately + try: + from code_puppy.agents import get_current_agent + + current_agent = get_current_agent() + if hasattr(current_agent, "refresh_config"): + try: + current_agent.refresh_config() + except Exception: + ... 
+ current_agent.reload_code_generation_agent() + except Exception: + # Non-fatal: settings saved; reload will happen on next run if needed + pass + + set_config_value("yolo_mode", yolo_mode) + + # Validate and save protected tokens + if protected_tokens.isdigit(): + tokens_value = int(protected_tokens) + model_context_length = get_model_context_length() + max_protected_tokens = int(model_context_length * 0.75) + + if tokens_value >= 1000: # Minimum validation + if tokens_value <= max_protected_tokens: # Maximum validation + set_config_value("protected_token_count", protected_tokens) + else: + raise ValueError( + f"Protected tokens must not exceed 75% of model context length ({max_protected_tokens} tokens for current model)" + ) + else: + raise ValueError("Protected tokens must be at least 1000") + elif protected_tokens: # If not empty but not digit + raise ValueError("Protected tokens must be a valid number") + + # Validate and save compaction threshold + if compaction_threshold: + try: + threshold_value = float(compaction_threshold) + if 0.8 <= threshold_value <= 0.95: # Same bounds as config function + set_config_value("compaction_threshold", compaction_threshold) + else: + raise ValueError( + "Compaction threshold must be between 0.8 and 0.95" + ) + except ValueError as ve: + if "must be between" in str(ve): + raise ve + else: + raise ValueError( + "Compaction threshold must be a valid decimal number" + ) + + # Save compaction strategy + compaction_strategy = self.query_one( + "#compaction-strategy-select", Select + ).value + if compaction_strategy in ["summarization", "truncation"]: + set_config_value("compaction_strategy", compaction_strategy) + + # Return success message with model change info + message = "Settings saved successfully!" + if selected_model: + message += f" Model switched to: {selected_model}" + + self.dismiss( + { + "success": True, + "message": message, + "model_changed": bool(selected_model), + } + ) + + except Exception as e: + self.dismiss( + {"success": False, "message": f"Error saving settings: {str(e)}"} + ) + + @on(Button.Pressed, "#cancel-button") + def cancel_settings(self) -> None: + """Cancel settings changes.""" + self.dismiss({"success": False, "message": "Settings cancelled"}) + + def on_key(self, event) -> None: + """Handle key events.""" + if event.key == "escape": + self.cancel_settings() diff --git a/code_puppy/tui/screens/tools.py b/code_puppy/tui/screens/tools.py new file mode 100644 index 00000000..0934eeca --- /dev/null +++ b/code_puppy/tui/screens/tools.py @@ -0,0 +1,74 @@ +""" +Tools modal screen. 
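+Renders the shared tools_content markdown in a scrollable modal dialog.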
+""" + +from textual import on +from textual.app import ComposeResult +from textual.containers import Container, VerticalScroll +from textual.screen import ModalScreen +from textual.widgets import Button, Markdown, Static + +from code_puppy.tools.tools_content import tools_content + + +class ToolsScreen(ModalScreen): + """Tools modal screen""" + + DEFAULT_CSS = """ + ToolsScreen { + align: center middle; + } + + #tools-dialog { + width: 95; + height: 40; + border: thick $primary; + background: $surface; + padding: 1; + } + + #tools-content { + height: 1fr; + margin: 0 0 1 0; + overflow-y: auto; + } + + #tools-buttons { + layout: horizontal; + height: 3; + align: center middle; + } + + #dismiss-button { + margin: 0 1; + } + + #tools-markdown { + margin: 0; + padding: 0; + } + + /* Style markdown elements for better readability */ + Markdown { + margin: 0; + padding: 0; + } + """ + + def compose(self) -> ComposeResult: + with Container(id="tools-dialog"): + yield Static("🛠️ Cooper's Toolkit\n", id="tools-title") + with VerticalScroll(id="tools-content"): + yield Markdown(tools_content, id="tools-markdown") + with Container(id="tools-buttons"): + yield Button("Dismiss", id="dismiss-button", variant="primary") + + @on(Button.Pressed, "#dismiss-button") + def dismiss_tools(self) -> None: + """Dismiss the tools modal.""" + self.dismiss() + + def on_key(self, event) -> None: + """Handle key events.""" + if event.key == "escape": + self.dismiss() diff --git a/code_puppy/tui_state.py b/code_puppy/tui_state.py new file mode 100644 index 00000000..5a60d462 --- /dev/null +++ b/code_puppy/tui_state.py @@ -0,0 +1,55 @@ +# TUI State Management +# This module contains functions for managing the global TUI state + +from typing import Any + +# Global TUI state variables +_tui_mode: bool = False +_tui_app_instance: Any = None + + +def set_tui_mode(enabled: bool) -> None: + """Set the global TUI mode state. + + Args: + enabled: True if running in TUI mode, False otherwise + """ + global _tui_mode + _tui_mode = enabled + + +def is_tui_mode() -> bool: + """Check if the application is running in TUI mode. + + Returns: + True if running in TUI mode, False otherwise + """ + return _tui_mode + + +def set_tui_app_instance(app_instance: Any) -> None: + """Set the global TUI app instance reference. + + Args: + app_instance: The TUI app instance + """ + global _tui_app_instance + _tui_app_instance = app_instance + + +def get_tui_app_instance() -> Any: + """Get the current TUI app instance. + + Returns: + The TUI app instance if available, None otherwise + """ + return _tui_app_instance + + +def get_tui_mode() -> bool: + """Get the current TUI mode state. 
+ + Returns: + True if running in TUI mode, False otherwise + """ + return _tui_mode diff --git a/code_puppy/version_checker.py b/code_puppy/version_checker.py new file mode 100644 index 00000000..448271a5 --- /dev/null +++ b/code_puppy/version_checker.py @@ -0,0 +1,35 @@ +import httpx + +from code_puppy.tools.common import console + + +def normalize_version(version_str): + if not version_str: + return version_str + return version_str.lstrip("v") + + +def versions_are_equal(current, latest): + return normalize_version(current) == normalize_version(latest) + + +def fetch_latest_version(package_name): + try: + response = httpx.get(f"https://pypi.org/pypi/{package_name}/json") + response.raise_for_status() # Raise an error for bad responses + data = response.json() + return data["info"]["version"] + except Exception as e: + print(f"Error fetching version: {e}") + return None + + +def default_version_mismatch_behavior(current_version): + latest_version = fetch_latest_version("code-puppy") + console.print(f"Current version: {current_version}") + console.print(f"Latest version: {latest_version}") + if latest_version and latest_version != current_version: + console.print( + f"[bold yellow]A new version of code puppy is available: {latest_version}[/bold yellow]" + ) + console.print("[bold green]Please consider updating![/bold green]") diff --git a/pyproject.toml b/pyproject.toml index 6cc10ad4..d0d63927 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,12 +4,12 @@ build-backend = "hatchling.build" [project] name = "code-puppy" -version = "0.0.12" +version = "0.0.204" description = "Code generation agent" readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.11" dependencies = [ - "pydantic-ai>=0.1.0", + "pydantic-ai==1.0.5", "httpx>=0.24.1", "rich>=13.4.2", "logfire>=0.7.1", @@ -19,7 +19,27 @@ dependencies = [ "pytest-cov>=6.1.1", "ruff>=0.11.11", "httpx-limiter>=0.3.0", - "prompt-toolkit>=3.0.38", + "prompt-toolkit>=3.0.52", + "pathspec>=0.11.0", + "rapidfuzz>=3.13.0", + "json-repair>=0.46.2", + "fastapi>=0.110.0", + "uvicorn>=0.29.0", + "PyJWT>=2.8.0", + "textual>=5.0.0", + "termcolor>=3.1.0", + "textual-dev>=1.7.0", + "openai>=1.99.1", + "ripgrep>=14.1.0", + "tenacity>=8.2.0", + "playwright>=1.40.0", + "camoufox>=0.4.11", +] +dev-dependencies = [ + "pytest>=8.3.4", + "pytest-cov>=6.1.1", + "pytest-asyncio>=0.23.1", + "ruff>=0.11.11", ] authors = [ {name = "Michael Pfaffenberger"} @@ -27,16 +47,22 @@ authors = [ license = {text = "MIT"} classifiers = [ "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", ] +[project.urls] +repository = "https://github.com/mpfaffenberger/code_puppy" +HomePage = "https://github.com/mpfaffenberger/code_puppy" + + [project.scripts] code-puppy = "code_puppy.main:main_entry" +pup = "code_puppy.main:main_entry" [tool.logfire] ignore_no_config = true @@ -53,6 +79,7 @@ path = "code_puppy/models.json" [tool.pytest.ini_options] addopts = "--cov=code_puppy --cov-report=term-missing" testpaths = ["tests"] +asyncio_mode = "auto" [tool.coverage.run] omit = ["code_puppy/main.py"] diff --git a/tests/mcp/test_retry_manager.py b/tests/mcp/test_retry_manager.py new file mode 100644 index 00000000..e853812f --- /dev/null +++ 
b/tests/mcp/test_retry_manager.py @@ -0,0 +1,428 @@ +""" +Tests for the RetryManager class. +""" + +import asyncio +from unittest.mock import AsyncMock, Mock + +import httpx +import pytest + +from code_puppy.mcp_.retry_manager import ( + RetryManager, + RetryStats, + get_retry_manager, + retry_mcp_call, +) + + +class TestRetryManager: + """Test cases for RetryManager class.""" + + def setup_method(self): + """Setup for each test method.""" + self.retry_manager = RetryManager() + + @pytest.mark.asyncio + async def test_successful_call_no_retry(self): + """Test that successful calls don't trigger retries.""" + mock_func = AsyncMock(return_value="success") + + result = await self.retry_manager.retry_with_backoff( + func=mock_func, + max_attempts=3, + strategy="exponential", + server_id="test-server", + ) + + assert result == "success" + assert mock_func.call_count == 1 + + # Check that no retry stats were recorded for successful first attempt + stats = await self.retry_manager.get_retry_stats("test-server") + assert stats.total_retries == 0 + + @pytest.mark.asyncio + async def test_retry_with_eventual_success(self): + """Test that retries work when function eventually succeeds.""" + mock_func = AsyncMock( + side_effect=[ + ConnectionError("Connection failed"), + ConnectionError("Still failing"), + "success", + ] + ) + + result = await self.retry_manager.retry_with_backoff( + func=mock_func, max_attempts=3, strategy="fixed", server_id="test-server" + ) + + assert result == "success" + assert mock_func.call_count == 3 + + # Check retry stats - stats are recorded after retries are attempted + stats = await self.retry_manager.get_retry_stats("test-server") + assert stats.total_retries == 1 + assert stats.successful_retries == 1 + assert stats.failed_retries == 0 + assert stats.average_attempts == 3.0 # All 3 attempts were made before failure + + @pytest.mark.asyncio + async def test_retry_exhaustion(self): + """Test that function raises exception when all retries are exhausted.""" + mock_func = AsyncMock(side_effect=ConnectionError("Always failing")) + + with pytest.raises(ConnectionError): + await self.retry_manager.retry_with_backoff( + func=mock_func, + max_attempts=3, + strategy="fixed", + server_id="test-server", + ) + + assert mock_func.call_count == 3 + + # Check retry stats - stats are recorded after retries are attempted + stats = await self.retry_manager.get_retry_stats("test-server") + assert stats.total_retries == 1 + assert stats.successful_retries == 0 + assert stats.failed_retries == 1 + assert stats.average_attempts == 3.0 # All 3 attempts were made before failure + + @pytest.mark.asyncio + async def test_non_retryable_error(self): + """Test that non-retryable errors don't trigger retries.""" + # Create an HTTP 401 error (unauthorized) + response = Mock() + response.status_code = 401 + mock_func = AsyncMock( + side_effect=httpx.HTTPStatusError( + "Unauthorized", request=Mock(), response=response + ) + ) + + with pytest.raises(httpx.HTTPStatusError): + await self.retry_manager.retry_with_backoff( + func=mock_func, + max_attempts=3, + strategy="exponential", + server_id="test-server", + ) + + assert mock_func.call_count == 1 + + # Check retry stats - stats are recorded after retries are attempted + stats = await self.retry_manager.get_retry_stats("test-server") + assert stats.total_retries == 1 + assert stats.successful_retries == 0 + assert stats.failed_retries == 1 + assert stats.average_attempts == 1.0 # Only 1 attempt was made before giving up + + def 
test_calculate_backoff_fixed(self): + """Test fixed backoff strategy.""" + assert self.retry_manager.calculate_backoff(1, "fixed") == 1.0 + assert self.retry_manager.calculate_backoff(5, "fixed") == 1.0 + + def test_calculate_backoff_linear(self): + """Test linear backoff strategy.""" + assert self.retry_manager.calculate_backoff(1, "linear") == 1.0 + assert self.retry_manager.calculate_backoff(2, "linear") == 2.0 + assert self.retry_manager.calculate_backoff(3, "linear") == 3.0 + + def test_calculate_backoff_exponential(self): + """Test exponential backoff strategy.""" + assert self.retry_manager.calculate_backoff(1, "exponential") == 1.0 + assert self.retry_manager.calculate_backoff(2, "exponential") == 2.0 + assert self.retry_manager.calculate_backoff(3, "exponential") == 4.0 + assert self.retry_manager.calculate_backoff(4, "exponential") == 8.0 + + def test_calculate_backoff_exponential_jitter(self): + """Test exponential backoff with jitter.""" + # Test multiple times to verify jitter is applied + delays = [ + self.retry_manager.calculate_backoff(3, "exponential_jitter") + for _ in range(10) + ] + + # Base delay for attempt 3 should be 4.0 + # base_delay = 4.0 # Not used in this test + + # All delays should be within jitter range (±25%) + for delay in delays: + assert 3.0 <= delay <= 5.0 # 4.0 ± 25% + assert delay >= 0.1 # Minimum delay + + # Should have some variation (not all the same) + assert len(set(delays)) > 1 + + def test_calculate_backoff_unknown_strategy(self): + """Test that unknown strategy defaults to exponential.""" + assert self.retry_manager.calculate_backoff(3, "unknown") == 4.0 + + def test_should_retry_retryable_errors(self): + """Test that retryable errors are identified correctly.""" + # Network errors + assert self.retry_manager.should_retry(ConnectionError("Connection failed")) + assert self.retry_manager.should_retry(asyncio.TimeoutError("Timeout")) + assert self.retry_manager.should_retry(OSError("Network error")) + + # HTTP timeout + assert self.retry_manager.should_retry(httpx.TimeoutException("Timeout")) + assert self.retry_manager.should_retry(httpx.ConnectError("Connect failed")) + assert self.retry_manager.should_retry(httpx.ReadError("Read failed")) + + # Server errors (5xx) + response_500 = Mock() + response_500.status_code = 500 + http_error_500 = httpx.HTTPStatusError( + "Server error", request=Mock(), response=response_500 + ) + assert self.retry_manager.should_retry(http_error_500) + + # Rate limit (429) + response_429 = Mock() + response_429.status_code = 429 + http_error_429 = httpx.HTTPStatusError( + "Rate limit", request=Mock(), response=response_429 + ) + assert self.retry_manager.should_retry(http_error_429) + + # Rate limit (429) with JSON error info + response_429_json = Mock() + response_429_json.status_code = 429 + response_429_json.json.return_value = { + "error": {"message": "Rate limit exceeded. 
Please try again later."} + } + http_error_429_json = httpx.HTTPStatusError( + "Rate limit", + request=Mock(), + response=response_429_json, + ) + assert self.retry_manager.should_retry(http_error_429_json) + + # Timeout (408) + response_408 = Mock() + response_408.status_code = 408 + http_error_408 = httpx.HTTPStatusError( + "Request timeout", request=Mock(), response=response_408 + ) + assert self.retry_manager.should_retry(http_error_408) + + # JSON errors + assert self.retry_manager.should_retry(ValueError("Invalid JSON format")) + + def test_should_retry_non_retryable_errors(self): + """Test that non-retryable errors are identified correctly.""" + # Authentication errors + response_401 = Mock() + response_401.status_code = 401 + http_error_401 = httpx.HTTPStatusError( + "Unauthorized", request=Mock(), response=response_401 + ) + assert not self.retry_manager.should_retry(http_error_401) + + response_403 = Mock() + response_403.status_code = 403 + http_error_403 = httpx.HTTPStatusError( + "Forbidden", request=Mock(), response=response_403 + ) + assert not self.retry_manager.should_retry(http_error_403) + + # Client errors (4xx except 408) + response_400 = Mock() + response_400.status_code = 400 + http_error_400 = httpx.HTTPStatusError( + "Bad request", request=Mock(), response=response_400 + ) + assert not self.retry_manager.should_retry(http_error_400) + + response_404 = Mock() + response_404.status_code = 404 + http_error_404 = httpx.HTTPStatusError( + "Not found", request=Mock(), response=response_404 + ) + assert not self.retry_manager.should_retry(http_error_404) + + # Schema/validation errors + assert not self.retry_manager.should_retry( + ValueError("Schema validation failed") + ) + assert not self.retry_manager.should_retry(ValueError("Validation error")) + + # Authentication-related string errors + assert not self.retry_manager.should_retry(Exception("Authentication failed")) + assert not self.retry_manager.should_retry(Exception("Permission denied")) + assert not self.retry_manager.should_retry(Exception("Unauthorized access")) + assert not self.retry_manager.should_retry(Exception("Forbidden operation")) + + @pytest.mark.asyncio + async def test_record_and_get_retry_stats(self): + """Test recording and retrieving retry statistics.""" + # Record some retry stats + await self.retry_manager.record_retry("server-1", 2, success=True) + await self.retry_manager.record_retry("server-1", 3, success=False) + await self.retry_manager.record_retry("server-2", 1, success=True) + + # Get stats for server-1 + stats = await self.retry_manager.get_retry_stats("server-1") + assert stats.total_retries == 2 + assert stats.successful_retries == 1 + assert stats.failed_retries == 1 + assert stats.average_attempts == 2.5 # Average of 2 and 3 attempts + assert stats.last_retry is not None + + # Get stats for server-2 + stats = await self.retry_manager.get_retry_stats("server-2") + assert stats.total_retries == 1 + assert stats.successful_retries == 1 + assert stats.failed_retries == 0 + assert stats.average_attempts == 1.0 + + # Get stats for non-existent server + stats = await self.retry_manager.get_retry_stats("non-existent") + assert stats.total_retries == 0 + + @pytest.mark.asyncio + async def test_get_all_stats(self): + """Test getting all retry statistics.""" + # Record stats for multiple servers + await self.retry_manager.record_retry("server-1", 2, success=True) + await self.retry_manager.record_retry("server-2", 1, success=False) + + all_stats = await self.retry_manager.get_all_stats() + 
+ assert len(all_stats) == 2 + assert "server-1" in all_stats + assert "server-2" in all_stats + assert all_stats["server-1"].total_retries == 1 + assert all_stats["server-2"].total_retries == 1 + + @pytest.mark.asyncio + async def test_clear_stats(self): + """Test clearing retry statistics.""" + # Record stats + await self.retry_manager.record_retry("server-1", 2, success=True) + await self.retry_manager.record_retry("server-2", 1, success=False) + + # Clear stats for server-1 + await self.retry_manager.clear_stats("server-1") + + stats = await self.retry_manager.get_retry_stats("server-1") + assert stats.total_retries == 0 + + # server-2 stats should remain + stats = await self.retry_manager.get_retry_stats("server-2") + assert stats.total_retries == 1 + + @pytest.mark.asyncio + async def test_clear_all_stats(self): + """Test clearing all retry statistics.""" + # Record stats + await self.retry_manager.record_retry("server-1", 2, success=True) + await self.retry_manager.record_retry("server-2", 1, success=False) + + # Clear all stats + await self.retry_manager.clear_all_stats() + + all_stats = await self.retry_manager.get_all_stats() + assert len(all_stats) == 0 + + +class TestRetryStats: + """Test cases for RetryStats class.""" + + def test_calculate_average_first_attempt(self): + """Test average calculation for first attempt.""" + stats = RetryStats() + stats.calculate_average(3) + assert stats.average_attempts == 3.0 + + def test_calculate_average_multiple_attempts(self): + """Test average calculation for multiple attempts.""" + stats = RetryStats() + stats.total_retries = 2 + stats.average_attempts = 2.5 # (2 + 3) / 2 + + stats.calculate_average(4) # Adding a third attempt with 4 tries + # New average: ((2.5 * 2) + 4) / 3 = (5 + 4) / 3 = 3.0 + assert stats.average_attempts == 3.0 + + +class TestGlobalRetryManager: + """Test cases for global retry manager functions.""" + + def test_get_retry_manager_singleton(self): + """Test that get_retry_manager returns the same instance.""" + manager1 = get_retry_manager() + manager2 = get_retry_manager() + + assert manager1 is manager2 + + @pytest.mark.asyncio + async def test_retry_mcp_call_convenience_function(self): + """Test the convenience function for MCP calls.""" + mock_func = AsyncMock(return_value="success") + + result = await retry_mcp_call( + func=mock_func, server_id="test-server", max_attempts=2, strategy="linear" + ) + + assert result == "success" + assert mock_func.call_count == 1 + + +class TestConcurrentOperations: + """Test cases for concurrent retry operations.""" + + def setup_method(self): + """Setup for each test method.""" + self.retry_manager = RetryManager() + + @pytest.mark.asyncio + async def test_concurrent_retries(self): + """Test that concurrent retries work correctly.""" + + async def failing_func(): + await asyncio.sleep(0.01) # Small delay + raise ConnectionError("Connection failed") + + async def succeeding_func(): + await asyncio.sleep(0.01) # Small delay + return "success" + + # Run concurrent retries + tasks = [ + self.retry_manager.retry_with_backoff( + succeeding_func, max_attempts=2, strategy="fixed", server_id="server-1" + ), + self.retry_manager.retry_with_backoff( + succeeding_func, max_attempts=2, strategy="fixed", server_id="server-2" + ), + ] + + results = await asyncio.gather(*tasks) + assert all(result == "success" for result in results) + + @pytest.mark.asyncio + async def test_concurrent_stats_operations(self): + """Test that concurrent statistics operations are thread-safe.""" + + async def 
record_stats(): + for i in range(10): + await self.retry_manager.record_retry( + f"server-{i % 3}", i + 1, success=True + ) + + # Run concurrent stats recording + await asyncio.gather(*[record_stats() for _ in range(5)]) + + # Verify stats were recorded correctly + all_stats = await self.retry_manager.get_all_stats() + assert len(all_stats) == 3 # server-0, server-1, server-2 + + # Each server should have recorded some retries + for server_id, stats in all_stats.items(): + assert stats.total_retries > 0 + assert ( + stats.successful_retries == stats.total_retries + ) # All were successful diff --git a/tests/test_agent_pinned_models.py b/tests/test_agent_pinned_models.py new file mode 100644 index 00000000..58e15e67 --- /dev/null +++ b/tests/test_agent_pinned_models.py @@ -0,0 +1,101 @@ +"""Tests for agent-specific model pinning functionality.""" + +import os +import tempfile + +import pytest + +from code_puppy.agents.agent_code_puppy import CodePuppyAgent +from code_puppy.config import ( + clear_agent_pinned_model, + get_agent_pinned_model, + get_global_model_name, + set_agent_pinned_model, +) + + +@pytest.fixture(autouse=True) +def mock_config_paths(monkeypatch): + """Fixture to monkeypatch config paths to temporary locations for all tests in this class.""" + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_config_dir = os.path.join(tmp_dir, ".code_puppy") + tmp_config_file = os.path.join(tmp_config_dir, "puppy.cfg") + monkeypatch.setattr("code_puppy.config.CONFIG_DIR", tmp_config_dir) + monkeypatch.setattr("code_puppy.config.CONFIG_FILE", tmp_config_file) + # Ensure the directory exists for the patched paths + os.makedirs(tmp_config_dir, exist_ok=True) + yield + + +class TestAgentPinnedModels: + """Test agent-specific model pinning.""" + + def test_set_and_get_agent_pinned_model(self): + """Test setting and getting pinned models for agents.""" + agent_name = "test-agent" + model_name = "gpt-4o" + + # Set pinned model + set_agent_pinned_model(agent_name, model_name) + + # Get pinned model + result = get_agent_pinned_model(agent_name) + assert result == model_name + + # Clean up + clear_agent_pinned_model(agent_name) + result = get_agent_pinned_model(agent_name) + assert result == "" or result is None + + def test_clear_agent_pinned_model(self): + """Test clearing pinned models for agents.""" + agent_name = "test-agent-clear" + model_name = "claude-3-5-sonnet" + + # Set and verify + set_agent_pinned_model(agent_name, model_name) + assert get_agent_pinned_model(agent_name) == model_name + + # Clear and verify + clear_agent_pinned_model(agent_name) + result = get_agent_pinned_model(agent_name) + assert result == "" or result is None + + def test_base_agent_get_model_name(self): + """Test BaseAgent.get_model_name() returns pinned model.""" + agent = CodePuppyAgent() + agent_name = agent.name # "code-puppy" + model_name = "gpt-4o-mini" + + # Initially no pinned model - should return global model + result = agent.get_model_name() + assert result == get_global_model_name() + + # Set pinned model + set_agent_pinned_model(agent_name, model_name) + + # Should return pinned model + result = agent.get_model_name() + assert result == model_name + + # Clean up + clear_agent_pinned_model(agent_name) + + def test_different_agents_different_models(self): + """Test that different agents can have different pinned models.""" + agent1_name = "agent-one" + agent1_model = "gpt-4o" + agent2_name = "agent-two" + agent2_model = "claude-3-5-sonnet" + + # Set different models for different agents + 
set_agent_pinned_model(agent1_name, agent1_model) + set_agent_pinned_model(agent2_name, agent2_model) + + # Verify each agent has its own model + assert get_agent_pinned_model(agent1_name) == agent1_model + assert get_agent_pinned_model(agent2_name) == agent2_model + + # Clean up + clear_agent_pinned_model(agent1_name) + clear_agent_pinned_model(agent2_name) diff --git a/tests/test_agent_refresh.py b/tests/test_agent_refresh.py new file mode 100644 index 00000000..b9fc53cf --- /dev/null +++ b/tests/test_agent_refresh.py @@ -0,0 +1,64 @@ +"""Test agent refresh functionality.""" + +import tempfile +from pathlib import Path +from unittest.mock import patch + +from code_puppy.agents import get_available_agents, refresh_agents + + +def test_refresh_agents_function(): + """Test that refresh_agents clears the cache and rediscovers agents.""" + # First call to get_available_agents should populate the cache + agents1 = get_available_agents() + + # Call refresh_agents + refresh_agents() + + # Second call should work (this tests that the cache was properly cleared) + agents2 = get_available_agents() + + # Should find the same agents (since we didn't add any new ones) + assert agents1 == agents2 + assert len(agents1) > 0 # Should have at least the built-in agents + + +def test_get_available_agents(): + """Test that get_available_agents works correctly.""" + # Call get_available_agents + agents = get_available_agents() + + # Should find agents + assert len(agents) > 0 + + +def test_json_agent_discovery_refresh(): + """Test that refresh picks up new JSON agents.""" + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "code_puppy.config.get_user_agents_directory", return_value=temp_dir + ): + # Get initial agents (should not include our test agent) + initial_agents = get_available_agents() + assert "test-agent" not in initial_agents + + # Create a test JSON agent file + test_agent_config = { + "name": "test-agent", + "description": "A test agent for refresh functionality", + "system_prompt": "You are a test agent.", + "tools": ["list_files", "read_file"], + } + + agent_file = Path(temp_dir) / "test-agent.json" + import json + + with open(agent_file, "w") as f: + json.dump(test_agent_config, f) + + # Refresh agents and check if the new agent is discovered + refreshed_agents = get_available_agents() + assert "test-agent" in refreshed_agents + assert ( + refreshed_agents["test-agent"] == "Test-Agent 🤖" + ) # Default display name format diff --git a/tests/test_agent_tools.py b/tests/test_agent_tools.py index 71f438d2..06756191 100644 --- a/tests/test_agent_tools.py +++ b/tests/test_agent_tools.py @@ -1,19 +1,25 @@ -from unittest.mock import patch, MagicMock -from code_puppy.tools.file_operations import read_file -from code_puppy.tools.command_runner import run_shell_command - -def test_read_file_nonexistent(): - with patch("os.path.exists", return_value=False): - result = read_file({}, "fake_path") - assert "error" in result - assert "does not exist" in result["error"] - - -def test_run_shell_command_success(): - mock_proc = MagicMock() - mock_proc.communicate.return_value = ("output", "") - mock_proc.returncode = 0 - with patch("subprocess.Popen", return_value=mock_proc): - result = run_shell_command({}, "echo hello") - assert result["success"] - assert "output" in result["stdout"] +"""Tests for agent tools functionality.""" + +from unittest.mock import MagicMock + +from code_puppy.tools.agent_tools import register_invoke_agent, register_list_agents + + +class TestAgentTools: + """Test suite 
for agent tools.""" + + def test_list_agents_tool(self): + """Test that list_agents tool registers correctly.""" + # Create a mock agent to register tools to + mock_agent = MagicMock() + + # Register the tool - this should not raise an exception + register_list_agents(mock_agent) + + def test_invoke_agent_tool(self): + """Test that invoke_agent tool registers correctly.""" + # Create a mock agent to register tools to + mock_agent = MagicMock() + + # Register the tool - this should not raise an exception + register_invoke_agent(mock_agent) diff --git a/tests/test_auto_save_session.py b/tests/test_auto_save_session.py new file mode 100644 index 00000000..d38d4b68 --- /dev/null +++ b/tests/test_auto_save_session.py @@ -0,0 +1,222 @@ +import os +from pathlib import Path +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest + +from code_puppy import config as cp_config +from code_puppy.session_storage import SessionMetadata + + +@pytest.fixture +def mock_config_paths(monkeypatch): + mock_home = "/mock_home" + mock_config_dir = os.path.join(mock_home, ".code_puppy") + mock_config_file = os.path.join(mock_config_dir, "puppy.cfg") + mock_contexts_dir = os.path.join(mock_config_dir, "contexts") + mock_autosave_dir = os.path.join(mock_config_dir, "autosaves") + + monkeypatch.setattr(cp_config, "CONFIG_DIR", mock_config_dir) + monkeypatch.setattr(cp_config, "CONFIG_FILE", mock_config_file) + monkeypatch.setattr(cp_config, "CONTEXTS_DIR", mock_contexts_dir) + monkeypatch.setattr(cp_config, "AUTOSAVE_DIR", mock_autosave_dir) + + original_expanduser = os.path.expanduser + + def mock_expanduser(path): + if path == "~": + return mock_home + if path.startswith("~" + os.sep): + return mock_home + path[1:] + return original_expanduser(path) + + monkeypatch.setattr(os.path, "expanduser", mock_expanduser) + return SimpleNamespace( + config_dir=mock_config_dir, + config_file=mock_config_file, + contexts_dir=mock_contexts_dir, + autosave_dir=mock_autosave_dir, + ) + + +class TestAutoSaveSession: + @patch("code_puppy.config.get_value") + def test_get_auto_save_session_enabled_true_values(self, mock_get_value): + true_values = ["true", "1", "YES", "on"] + for val in true_values: + mock_get_value.reset_mock() + mock_get_value.return_value = val + assert cp_config.get_auto_save_session() is True, f"Failed for config value: {val}" + mock_get_value.assert_called_once_with("auto_save_session") + + @patch("code_puppy.config.get_value") + def test_get_auto_save_session_enabled_false_values(self, mock_get_value): + false_values = ["false", "0", "NO", "off", "invalid"] + for val in false_values: + mock_get_value.reset_mock() + mock_get_value.return_value = val + assert cp_config.get_auto_save_session() is False, f"Failed for config value: {val}" + mock_get_value.assert_called_once_with("auto_save_session") + + @patch("code_puppy.config.get_value") + def test_get_auto_save_session_default_true(self, mock_get_value): + mock_get_value.return_value = None + assert cp_config.get_auto_save_session() is True + mock_get_value.assert_called_once_with("auto_save_session") + + @patch("code_puppy.config.set_config_value") + def test_set_auto_save_session_enabled(self, mock_set_config_value): + cp_config.set_auto_save_session(True) + mock_set_config_value.assert_called_once_with("auto_save_session", "true") + + @patch("code_puppy.config.set_config_value") + def test_set_auto_save_session_disabled(self, mock_set_config_value): + cp_config.set_auto_save_session(False) + 
mock_set_config_value.assert_called_once_with("auto_save_session", "false") + + +class TestMaxSavedSessions: + @patch("code_puppy.config.get_value") + def test_get_max_saved_sessions_valid_int(self, mock_get_value): + mock_get_value.return_value = "15" + assert cp_config.get_max_saved_sessions() == 15 + mock_get_value.assert_called_once_with("max_saved_sessions") + + @patch("code_puppy.config.get_value") + def test_get_max_saved_sessions_zero(self, mock_get_value): + mock_get_value.return_value = "0" + assert cp_config.get_max_saved_sessions() == 0 + mock_get_value.assert_called_once_with("max_saved_sessions") + + @patch("code_puppy.config.get_value") + def test_get_max_saved_sessions_negative_clamped_to_zero(self, mock_get_value): + mock_get_value.return_value = "-5" + assert cp_config.get_max_saved_sessions() == 0 + mock_get_value.assert_called_once_with("max_saved_sessions") + + @patch("code_puppy.config.get_value") + def test_get_max_saved_sessions_invalid_value_defaults(self, mock_get_value): + invalid_values = ["invalid", "not_a_number", "", None] + for val in invalid_values: + mock_get_value.reset_mock() + mock_get_value.return_value = val + assert cp_config.get_max_saved_sessions() == 20 # Default value + mock_get_value.assert_called_once_with("max_saved_sessions") + + @patch("code_puppy.config.get_value") + def test_get_max_saved_sessions_default(self, mock_get_value): + mock_get_value.return_value = None + assert cp_config.get_max_saved_sessions() == 20 + mock_get_value.assert_called_once_with("max_saved_sessions") + + @patch("code_puppy.config.set_config_value") + def test_set_max_saved_sessions(self, mock_set_config_value): + cp_config.set_max_saved_sessions(25) + mock_set_config_value.assert_called_once_with("max_saved_sessions", "25") + + @patch("code_puppy.config.set_config_value") + def test_set_max_saved_sessions_zero(self, mock_set_config_value): + cp_config.set_max_saved_sessions(0) + mock_set_config_value.assert_called_once_with("max_saved_sessions", "0") + + +class TestAutoSaveSessionFunctionality: + @patch("code_puppy.config.get_auto_save_session") + def test_auto_save_session_if_enabled_disabled(self, mock_get_auto_save): + mock_get_auto_save.return_value = False + result = cp_config.auto_save_session_if_enabled() + assert result is False + mock_get_auto_save.assert_called_once() + + @patch("code_puppy.config.save_session") + @patch("code_puppy.config.datetime") + @patch("code_puppy.config.get_auto_save_session") + @patch("code_puppy.agents.agent_manager.get_current_agent") + @patch("rich.console.Console") + def test_auto_save_session_if_enabled_success( + self, + mock_console_class, + mock_get_agent, + mock_get_auto_save, + mock_datetime, + mock_save_session, + mock_cleanup, + mock_config_paths, + ): + mock_get_auto_save.return_value = True + + history = ["hey", "listen"] + mock_agent = MagicMock() + mock_agent.get_message_history.return_value = history + mock_agent.estimate_tokens_for_message.return_value = 3 + mock_get_agent.return_value = mock_agent + + fake_now = MagicMock() + fake_now.strftime.return_value = "20240101_010101" + fake_now.isoformat.return_value = "2024-01-01T01:01:01" + mock_datetime.datetime.now.return_value = fake_now + + metadata = SessionMetadata( + session_name="auto_session_20240101_010101", + timestamp="2024-01-01T01:01:01", + message_count=len(history), + total_tokens=6, + pickle_path=Path(mock_config_paths.autosave_dir) / "auto_session_20240101_010101.pkl", + metadata_path=Path(mock_config_paths.autosave_dir) + / 
"auto_session_20240101_010101_meta.json", + ) + mock_save_session.return_value = metadata + + mock_console = MagicMock() + mock_console_class.return_value = mock_console + + result = cp_config.auto_save_session_if_enabled() + + assert result is True + mock_save_session.assert_called_once() + kwargs = mock_save_session.call_args.kwargs + assert kwargs["base_dir"] == Path(mock_config_paths.autosave_dir) + assert kwargs["session_name"] == "auto_session_20240101_010101" + mock_cleanup.assert_called_once() + mock_console.print.assert_called_once() + + @patch("code_puppy.config.get_auto_save_session") + @patch("code_puppy.agents.agent_manager.get_current_agent") + @patch("rich.console.Console") + def test_auto_save_session_if_enabled_exception( + self, mock_console_class, mock_get_agent, mock_get_auto_save, mock_config_paths + ): + mock_get_auto_save.return_value = True + mock_agent = MagicMock() + mock_agent.get_message_history.side_effect = Exception("Agent error") + mock_get_agent.return_value = mock_agent + + mock_console_instance = MagicMock() + mock_console_class.return_value = mock_console_instance + + result = cp_config.auto_save_session_if_enabled() + assert result is False + mock_console_instance.print.assert_called_once() + + +class TestFinalizeAutoSaveSession: + @patch("code_puppy.config.rotate_autosave_id", return_value="fresh_id") + @patch("code_puppy.config.auto_save_session_if_enabled", return_value=True) + def test_finalize_autosave_session_saves_and_rotates( + self, mock_auto_save, mock_rotate + ): + result = cp_config.finalize_autosave_session() + assert result == "fresh_id" + mock_auto_save.assert_called_once_with() + mock_rotate.assert_called_once_with() + + @patch("code_puppy.config.rotate_autosave_id", return_value="fresh_id") + @patch("code_puppy.config.auto_save_session_if_enabled", return_value=False) + def test_finalize_autosave_session_rotates_even_without_save( + self, mock_auto_save, mock_rotate + ): + result = cp_config.finalize_autosave_session() + assert result == "fresh_id" + mock_auto_save.assert_called_once_with() + mock_rotate.assert_called_once_with() diff --git a/tests/test_command_handler.py b/tests/test_command_handler.py new file mode 100644 index 00000000..6b2a54d9 --- /dev/null +++ b/tests/test_command_handler.py @@ -0,0 +1,845 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +from code_puppy.command_line.command_handler import handle_command + + +# Function to create a test context with patched messaging functions +def setup_messaging_mocks(): + """Set up mocks for all the messaging functions and return them in a dictionary.""" + mocks = {} + patch_targets = [ + "code_puppy.messaging.emit_info", + "code_puppy.messaging.emit_error", + "code_puppy.messaging.emit_warning", + "code_puppy.messaging.emit_success", + "code_puppy.messaging.emit_system_message", + ] + + for target in patch_targets: + function_name = target.split(".")[-1] + mocks[function_name] = patch(target) + + return mocks + + +def test_help_outputs_help(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + result = handle_command("/help") + assert result is True + mock_emit_info.assert_called() + assert any( + "Commands Help" in str(call) for call in (mock_emit_info.call_args_list) + ) + finally: + mocks["emit_info"].stop() + + +def test_cd_show_lists_directories(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with 
patch("code_puppy.command_line.utils.make_directory_table") as mock_table: + from rich.table import Table + + fake_table = Table() + mock_table.return_value = fake_table + result = handle_command("/cd") + assert result is True + # Just check that emit_info was called, the exact value is a Table object + mock_emit_info.assert_called() + finally: + mocks["emit_info"].stop() + + +def test_cd_valid_change(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + with ( + patch("os.path.expanduser", side_effect=lambda x: x), + patch("os.path.isabs", return_value=True), + patch("os.path.isdir", return_value=True), + patch("os.chdir") as mock_chdir, + ): + result = handle_command("/cd /some/dir") + assert result is True + mock_chdir.assert_called_once_with("/some/dir") + mock_emit_success.assert_called_with("Changed directory to: /some/dir") + finally: + mocks["emit_success"].stop() + + +def test_cd_invalid_directory(): + mocks = setup_messaging_mocks() + mock_emit_error = mocks["emit_error"].start() + + try: + with ( + patch("os.path.expanduser", side_effect=lambda x: x), + patch("os.path.isabs", return_value=True), + patch("os.path.isdir", return_value=False), + ): + result = handle_command("/cd /not/a/dir") + assert result is True + mock_emit_error.assert_called_with("Not a directory: /not/a/dir") + finally: + mocks["emit_error"].stop() + + +def test_m_sets_model(): + # Simplified test - just check that the command handler returns True + with ( + patch("code_puppy.messaging.emit_success"), + patch( + "code_puppy.command_line.model_picker_completion.update_model_in_input", + return_value="some_model", + ), + patch( + "code_puppy.command_line.model_picker_completion.get_active_model", + return_value="gpt-9001", + ), + ): + result = handle_command("/mgpt-9001") + assert result is True + + +def test_m_unrecognized_model_lists_options(): + mocks = setup_messaging_mocks() + mock_emit_warning = mocks["emit_warning"].start() + + try: + with ( + patch( + "code_puppy.command_line.model_picker_completion.update_model_in_input", + return_value=None, + ), + patch( + "code_puppy.command_line.model_picker_completion.load_model_names", + return_value=["a", "b", "c"], + ), + ): + result = handle_command("/m not-a-model") + assert result is True + # Check that emit_warning was called with appropriate messages + mock_emit_warning.assert_called() + assert any( + "Usage: /model or /m " in str(call) + for call in mock_emit_warning.call_args_list + ) + assert any( + "Available models" in str(call) + for call in mock_emit_warning.call_args_list + ) + finally: + mocks["emit_warning"].stop() + + +def test_set_config_value_equals(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + with ( + patch("code_puppy.config.set_config_value") as mock_set_cfg, + patch( + "code_puppy.config.get_config_keys", return_value=["pony", "rainbow"] + ), + ): + result = handle_command("/set pony=rainbow") + assert result is True + mock_set_cfg.assert_called_once_with("pony", "rainbow") + mock_emit_success.assert_called() + assert any( + "Set" in str(call) and "pony" in str(call) and "rainbow" in str(call) + for call in mock_emit_success.call_args_list + ) + finally: + mocks["emit_success"].stop() + + +def test_set_config_value_space(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + with ( + patch("code_puppy.config.set_config_value") as mock_set_cfg, + patch( + "code_puppy.config.get_config_keys", 
return_value=["pony", "rainbow"] + ), + ): + result = handle_command("/set pony rainbow") + assert result is True + mock_set_cfg.assert_called_once_with("pony", "rainbow") + mock_emit_success.assert_called() + assert any( + "Set" in str(call) and "pony" in str(call) and "rainbow" in str(call) + for call in mock_emit_success.call_args_list + ) + finally: + mocks["emit_success"].stop() + + +def test_set_config_only_key(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + with ( + patch("code_puppy.config.set_config_value") as mock_set_cfg, + patch("code_puppy.config.get_config_keys", return_value=["key"]), + ): + result = handle_command("/set pony") + assert result is True + mock_set_cfg.assert_called_once_with("pony", "") + mock_emit_success.assert_called() + assert any( + "Set" in str(call) and "pony" in str(call) + for call in mock_emit_success.call_args_list + ) + finally: + mocks["emit_success"].stop() + + +def test_show_status(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with ( + patch( + "code_puppy.command_line.model_picker_completion.get_active_model", + return_value="MODEL-X", + ), + patch("code_puppy.config.get_owner_name", return_value="Ivan"), + patch("code_puppy.config.get_puppy_name", return_value="Biscuit"), + patch("code_puppy.config.get_yolo_mode", return_value=True), + ): + result = handle_command("/show") + assert result is True + mock_emit_info.assert_called() + assert any( + "Puppy Status" in str(call) + and "Ivan" in str(call) + and "Biscuit" in str(call) + and "MODEL-X" in str(call) + for call in mock_emit_info.call_args_list + ) + finally: + mocks["emit_info"].stop() + + +def test_unknown_command(): + mocks = setup_messaging_mocks() + mock_emit_warning = mocks["emit_warning"].start() + + try: + result = handle_command("/unknowncmd") + assert result is True + mock_emit_warning.assert_called() + assert any( + "Unknown command" in str(call) for call in mock_emit_warning.call_args_list + ) + finally: + mocks["emit_warning"].stop() + + +def test_bare_slash_shows_current_model(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with patch( + "code_puppy.command_line.model_picker_completion.get_active_model", + return_value="yarn", + ): + result = handle_command("/") + assert result is True + mock_emit_info.assert_called() + assert any( + "Current Model:" in str(call) and "yarn" in str(call) + for call in mock_emit_info.call_args_list + ) + finally: + mocks["emit_info"].stop() + + +def test_set_no_args_prints_usage(): + mocks = setup_messaging_mocks() + mock_emit_warning = mocks["emit_warning"].start() + + try: + with patch("code_puppy.config.get_config_keys", return_value=["foo", "bar"]): + result = handle_command("/set") + assert result is True + mock_emit_warning.assert_called() + assert any( + "Usage" in str(call) and "Config keys" in str(call) + for call in mock_emit_warning.call_args_list + ) + finally: + mocks["emit_warning"].stop() + + +def test_set_missing_key_errors(): + mocks = setup_messaging_mocks() + mock_emit_error = mocks["emit_error"].start() + + try: + # This will enter the 'else' branch printing 'You must supply a key.' 
+ with patch("code_puppy.config.get_config_keys", return_value=["foo", "bar"]): + result = handle_command("/set =value") + assert result is True + mock_emit_error.assert_called_with("You must supply a key.") + finally: + mocks["emit_error"].stop() + + +def test_non_command_returns_false(): + # No need for mocks here since we're just testing the return value + result = handle_command("echo hi") + assert result is False + + +def test_bare_slash_with_spaces(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with patch( + "code_puppy.command_line.model_picker_completion.get_active_model", + return_value="zoom", + ): + result = handle_command("/ ") + assert result is True + mock_emit_info.assert_called() + assert any( + "Current Model:" in str(call) and "zoom" in str(call) + for call in mock_emit_info.call_args_list + ) + finally: + mocks["emit_info"].stop() + + +def test_agent_switch_triggers_autosave_rotation(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_success = mocks["emit_success"].start() + + try: + current_agent = SimpleNamespace(name="code-puppy", display_name="Code Puppy") + new_agent = SimpleNamespace( + name="reviewer", + display_name="Reviewer", + description="Checks code", + ) + new_agent.reload_code_generation_agent = MagicMock() + + with ( + patch( + "code_puppy.agents.get_current_agent", + side_effect=[current_agent, new_agent], + ), + patch( + "code_puppy.agents.get_available_agents", + return_value={"code-puppy": "Code Puppy", "reviewer": "Reviewer"}, + ), + patch( + "code_puppy.command_line.command_handler.finalize_autosave_session", + return_value="fresh_id", + ) as mock_finalize, + patch( + "code_puppy.agents.set_current_agent", + return_value=True, + ) as mock_set, + ): + result = handle_command("/agent reviewer") + assert result is True + mock_finalize.assert_called_once_with() + mock_set.assert_called_once_with("reviewer") + + assert any("Switched to agent" in str(call) for call in mock_emit_success.call_args_list) + assert any("Auto-save session rotated" in str(call) for call in mock_emit_info.call_args_list) + finally: + mocks["emit_info"].stop() + mocks["emit_success"].stop() + + +def test_agent_switch_same_agent_skips_rotation(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + current_agent = SimpleNamespace(name="code-puppy", display_name="Code Puppy") + with ( + patch( + "code_puppy.agents.get_current_agent", + return_value=current_agent, + ), + patch( + "code_puppy.agents.get_available_agents", + return_value={"code-puppy": "Code Puppy"}, + ), + patch( + "code_puppy.command_line.command_handler.finalize_autosave_session", + ) as mock_finalize, + patch( + "code_puppy.agents.set_current_agent", + ) as mock_set, + ): + result = handle_command("/agent code-puppy") + assert result is True + mock_finalize.assert_not_called() + mock_set.assert_not_called() + + assert any("Already using agent" in str(call) for call in mock_emit_info.call_args_list) + finally: + mocks["emit_info"].stop() + + +def test_agent_switch_unknown_agent_skips_rotation(): + mocks = setup_messaging_mocks() + mock_emit_warning = mocks["emit_warning"].start() + + try: + with ( + patch( + "code_puppy.agents.get_available_agents", + return_value={"code-puppy": "Code Puppy"}, + ), + patch( + "code_puppy.command_line.command_handler.finalize_autosave_session", + ) as mock_finalize, + patch( + "code_puppy.agents.set_current_agent", + ) as mock_set, + ): + result = 
handle_command("/agent reviewer") + assert result is True + mock_finalize.assert_not_called() + mock_set.assert_not_called() + + assert any("Available agents" in str(call) for call in mock_emit_warning.call_args_list) + finally: + mocks["emit_warning"].stop() + + +def test_tools_displays_tools_md(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with ( + patch("pathlib.Path.exists", return_value=True), + patch("builtins.open", create=True) as mock_open, + ): + mock_open.return_value.__enter__.return_value.read.return_value = ( + "# Mock TOOLS.md content\n\nThis is a test." + ) + result = handle_command("/tools") + assert result is True + mock_emit_info.assert_called_once() + # Check that emit_info was called with a Markdown object + call_args = mock_emit_info.call_args[0][0] + # The call should be with a Rich Markdown object + from rich.markdown import Markdown + + assert isinstance(call_args, Markdown) + finally: + mocks["emit_info"].stop() + + +def test_tools_file_not_found(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + # Since we now use tools_content.py, we just verify that tools are displayed + # without needing to read from a file + with patch("code_puppy.tools.tools_content.tools_content", "# Mock content"): + result = handle_command("/tools") + assert result is True + mock_emit_info.assert_called_once() + # Check that emit_info was called with a Markdown object + call_args = mock_emit_info.call_args[0][0] + # The call should be with a Rich Markdown object + from rich.markdown import Markdown + + assert isinstance(call_args, Markdown) + finally: + mocks["emit_info"].stop() + + +def test_tools_read_error(): + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + # Test handling when there's an issue with tools_content - it should still work + # by falling back to an empty or default string if the imported content fails + with patch( + "code_puppy.command_line.command_handler.tools_content", + "# Fallback content", + ): + result = handle_command("/tools") + assert result is True + mock_emit_info.assert_called_once() + # Check that emit_info was called with a Markdown object + call_args = mock_emit_info.call_args[0][0] + # The call should be with a Rich Markdown object + from rich.markdown import Markdown + + assert isinstance(call_args, Markdown) + finally: + mocks["emit_info"].stop() + + +def test_exit_command(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + result = handle_command("/exit") + assert result is True + mock_emit_success.assert_called_with("Goodbye!") + finally: + mocks["emit_success"].stop() + + +def test_quit_command(): + mocks = setup_messaging_mocks() + mock_emit_success = mocks["emit_success"].start() + + try: + result = handle_command("/quit") + assert result is True + mock_emit_success.assert_called_with("Goodbye!") + finally: + mocks["emit_success"].stop() + + +# History Command Tests + +def test_history_default_behavior(): + """Test basic /history command with default 10 message limit.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_warning = mocks["emit_warning"].start() + + try: + # Mock the dependencies + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="test_session"), + patch("code_puppy.session_storage.list_sessions", 
return_value=["test_session", "other_session"]), + patch("pathlib.Path") as mock_path, + ): + # Create mock agent with history + mock_agent = MagicMock() + + # Create mock messages + mock_message1 = MagicMock() + mock_message1.role = "user" + mock_message1.content = "Hello world" + + mock_message2 = MagicMock() + mock_message2.role = "assistant" + mock_message2.content = "Hi there!" + + mock_agent.get_message_history.return_value = [mock_message1, mock_message2] + mock_agent.estimate_tokens_for_message.return_value = 10 + mock_get_agent.return_value = mock_agent + + result = handle_command("/history") + + assert result is True + mock_emit_info.assert_called() + + # Check that session info was displayed + calls = [str(call) for call in mock_emit_info.call_args_list] + assert any("Current Autosave Session" in call and "test_session" in call for call in calls) + # Check that messages count is displayed + assert any("Messages:" in call and "2" in call and "total" in call for call in calls) + + finally: + mocks["emit_info"].stop() + mocks["emit_warning"].stop() + + +def test_history_no_messages(): + """Test /history when there are no messages in current session.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_warning = mocks["emit_warning"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="empty_session"), + patch("code_puppy.session_storage.list_sessions", return_value=["empty_session"]), + patch("code_puppy.config.AUTOSAVE_DIR", "/tmp/test_autosave"), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + mock_agent.get_message_history.return_value = [] + mock_get_agent.return_value = mock_agent + + result = handle_command("/history") + + assert result is True + # Check that warning contains expected message (ignoring message_group) + mock_emit_warning.assert_called_once() + args = mock_emit_warning.call_args[0][0] + assert "No message history in current session. Ask me something first!" in args + + finally: + mocks["emit_info"].stop() + mocks["emit_warning"].stop() + + +def test_history_linecount_valid(): + """Test /history with valid linecount parameter.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_error = mocks["emit_error"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="test_session"), + patch("code_puppy.session_storage.list_sessions", return_value=["test_session"]), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + + # Create 15 mock messages + messages = [] + for i in range(15): + mock_msg = MagicMock() + mock_msg.role = "user" if i % 2 == 0 else "assistant" + mock_msg.content = f"Message {i + 1}" + messages.append(mock_msg) + + mock_agent.get_message_history.return_value = messages + mock_agent.estimate_tokens_for_message.return_value = 10 + mock_get_agent.return_value = mock_agent + + # Test with linecount of 5 + result = handle_command("/history 5") + + assert result is True + mock_emit_error.assert_not_called() + + calls = [str(call) for call in mock_emit_info.call_args_list] + assert any("Recent Messages (last 5)" in call for call in calls) + assert any("... 
and 10 earlier messages" in call for call in calls) + + finally: + mocks["emit_info"].stop() + mocks["emit_error"].stop() + + +def test_history_linecount_invalid(): + """Test /history with invalid linecount parameters.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_error = mocks["emit_error"].start() + + try: + test_cases = [ + ("/history 0", "Line count must be a positive integer"), + ("/history -5", "Line count must be a positive integer"), + ("/history abc", "Invalid line count: abc"), + ("/history 5.5", "Invalid line count: 5.5"), + ("/history 5 10", "Usage: /history [N]"), + ("/history 1 2 3", "Usage: /history [N]"), + ] + + for command, expected_error in test_cases: + mock_emit_error.reset_mock() + + result = handle_command(command) + + assert result is True + mock_emit_error.assert_called_once() + assert expected_error in str(mock_emit_error.call_args) + + finally: + mocks["emit_info"].stop() + mocks["emit_error"].stop() + + +def test_history_display_formatting(): + """Test that message display formatting works correctly.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="test_session"), + patch("code_puppy.config.get_puppy_name", return_value="Blufus"), + patch("code_puppy.session_storage.list_sessions", return_value=["test_session"]), + patch("code_puppy.config.AUTOSAVE_DIR", "/tmp/test_autosave"), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + + # Create simple message with role and content + mock_message = MagicMock() + mock_message.role = "user" + mock_message.content = "What is Python?" + + mock_agent.get_message_history.return_value = [mock_message] + mock_agent.estimate_tokens_for_message.return_value = 50 + mock_get_agent.return_value = mock_agent + + result = handle_command("/history") + + assert result is True + + calls = [str(call) for call in mock_emit_info.call_args_list] + # Check that user content is displayed + assert any("What is Python?" in call for call in calls) + # Just check that content is displayed - role format may vary + assert len(calls) > 0 # Ensure we got some output + + finally: + mocks["emit_info"].stop() + + +def test_history_thinking_duration_extraction(): + """Test that thinking duration is extracted correctly from different formats.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="test_session"), + patch("code_puppy.config.get_puppy_name", return_value="Blufus"), + patch("code_puppy.session_storage.list_sessions", return_value=["test_session"]), + patch("code_puppy.config.AUTOSAVE_DIR", "/tmp/test_autosave"), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + + # Create simple assistant message + mock_message = MagicMock() + mock_message.role = "assistant" + mock_message.content = "Here is my response." 
+ + mock_agent.get_message_history.return_value = [mock_message] + mock_agent.estimate_tokens_for_message.return_value = 30 + mock_get_agent.return_value = mock_agent + + result = handle_command("/history") + + assert result is True + + calls = [str(call) for call in mock_emit_info.call_args_list] + # Check that assistant content is displayed + assert any("Here is my response" in call for call in calls) + # Just check that content is displayed - role format may vary + assert len(calls) > 0 # Ensure we got some output + + finally: + mocks["emit_info"].stop() + + +def test_history_edge_cases(): + """Test edge cases and error conditions.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + mock_emit_warning = mocks["emit_warning"].start() + mock_emit_error = mocks["emit_error"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="test_session"), + patch("code_puppy.session_storage.list_sessions", return_value=["test_session"]), + patch("code_puppy.config.AUTOSAVE_DIR", "/tmp/test_autosave"), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + + # Test agent error + mock_agent.get_message_history.side_effect = Exception("Agent error") + mock_get_agent.return_value = mock_agent + + result = handle_command("/history") + + assert result is True + # Check that the error message contains the expected text (ignoring message_group) + mock_emit_error.assert_called_once() + args = mock_emit_error.call_args[0][0] + assert "Failed to get current message history: Agent error" in args + + # Test malformed message + mock_agent.get_message_history.side_effect = None + mock_agent.get_message_history.return_value = ["not a proper message object"] + + mock_emit_error.reset_mock() + mock_emit_info.reset_mock() + + result = handle_command("/history") + + assert result is True + # Should handle gracefully and show some info + mock_emit_info.assert_called() + + finally: + mocks["emit_info"].stop() + mocks["emit_warning"].stop() + mocks["emit_error"].stop() + + +def test_history_session_management(): + """Test that session management integration works correctly.""" + mocks = setup_messaging_mocks() + mock_emit_info = mocks["emit_info"].start() + + try: + with ( + patch("code_puppy.agents.agent_manager.get_current_agent") as mock_get_agent, + patch("code_puppy.config.get_current_autosave_session_name", return_value="current_session"), + patch("code_puppy.session_storage.list_sessions") as mock_list_sessions, + patch("code_puppy.config.AUTOSAVE_DIR", "/tmp/test_autosave"), + patch("pathlib.Path"), + ): + mock_agent = MagicMock() + mock_agent.get_message_history.return_value = [] + mock_agent.estimate_tokens_for_message.return_value = 0 + mock_get_agent.return_value = mock_agent + + # Test with other sessions available + mock_list_sessions.return_value = ["current_session", "other_session1", "other_session2"] + + result = handle_command("/history") + + assert result is True + + calls = [str(call) for call in mock_emit_info.call_args_list] + assert any("Current Autosave Session" in call and "current_session" in call for call in calls) + # Just check that the other sessions section is shown + assert any("Other Autosave Sessions Available" in call for call in calls) + + # Test with no other sessions + mock_emit_info.reset_mock() + mock_list_sessions.return_value = ["current_session"] + + result = handle_command("/history") + + calls = [str(call) for call 
in mock_emit_info.call_args_list] + # Should not show "Other Sessions" section + assert not any("Other Autosave Sessions Available" in call for call in calls) + + finally: + mocks["emit_info"].stop() diff --git a/tests/test_command_line_attachments.py b/tests/test_command_line_attachments.py new file mode 100644 index 00000000..e30f547a --- /dev/null +++ b/tests/test_command_line_attachments.py @@ -0,0 +1,180 @@ +"""Tests for CLI attachment parsing and execution helpers.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest +from pydantic_ai import BinaryContent + +from code_puppy.command_line.attachments import ( + DEFAULT_ACCEPTED_IMAGE_EXTENSIONS, + parse_prompt_attachments, +) +from code_puppy.main import run_prompt_with_attachments + + +@pytest.mark.parametrize("extension", sorted(DEFAULT_ACCEPTED_IMAGE_EXTENSIONS)) +def test_parse_prompt_attachments_handles_images(tmp_path: Path, extension: str) -> None: + attachment_path = tmp_path / f"image{extension}" + attachment_path.write_bytes(b"fake-bytes") + + processed = parse_prompt_attachments(str(attachment_path)) + + assert processed.prompt == "Describe the attached files in detail." + assert processed.attachments + assert processed.attachments[0].content.media_type.startswith("image/") + assert processed.warnings == [] + + +def test_parse_prompt_attachments_handles_unquoted_spaces(tmp_path: Path) -> None: + file_path = tmp_path / "cute pupper image.png" + file_path.write_bytes(b"imaginary") + + raw_prompt = f"please inspect {file_path} right now" + + processed = parse_prompt_attachments(raw_prompt) + + assert processed.prompt == "please inspect right now" + assert len(processed.attachments) == 1 + assert processed.attachments[0].content.media_type.startswith("image/") + assert processed.warnings == [] + + +def test_parse_prompt_handles_dragged_escaped_spaces(tmp_path: Path) -> None: + # Simulate a path with backslash-escaped spaces as produced by drag-and-drop + file_path = tmp_path / "cute pupper image.png" + file_path.write_bytes(b"imaginary") + + # Simulate terminal drag-and-drop: insert backslash before spaces + escaped_display_path = str(file_path).replace(" ", r"\ ") + raw_prompt = f"please inspect {escaped_display_path} right now" + + processed = parse_prompt_attachments(raw_prompt) + + assert processed.prompt == "please inspect right now" + assert len(processed.attachments) == 1 + assert processed.attachments[0].content.media_type.startswith("image/") + assert processed.warnings == [] + + +def test_parse_prompt_attachments_trims_trailing_punctuation(tmp_path: Path) -> None: + file_path = tmp_path / "doggo photo.png" + file_path.write_bytes(b"bytes") + + processed = parse_prompt_attachments(f"look {file_path}, please") + + assert processed.prompt == "look please" + assert len(processed.attachments) == 1 + assert processed.attachments[0].content.media_type.startswith("image/") + assert processed.warnings == [] + + +def test_parse_prompt_skips_unsupported_types(tmp_path: Path) -> None: + unsupported = tmp_path / "notes.xyz" + unsupported.write_text("hello") + + processed = parse_prompt_attachments(str(unsupported)) + + assert processed.prompt == str(unsupported) + assert processed.attachments == [] + assert "Unsupported attachment type" in processed.warnings[0] + + +def test_parse_prompt_leaves_urls_untouched() -> None: + url = "https://example.com/cute-puppy.png" + processed = parse_prompt_attachments(f"describe {url}") + + assert 
processed.prompt == f"describe {url}" + assert processed.attachments == [] + assert processed.link_attachments == [] + + +@pytest.mark.asyncio +async def test_run_prompt_with_attachments_passes_binary(tmp_path: Path) -> None: + image_path = tmp_path / "dragged.png" + image_path.write_bytes(b"png-bytes") + + raw_prompt = f"Check this {image_path}" + + fake_agent = AsyncMock() + fake_result = AsyncMock() + fake_agent.run_with_mcp.return_value = fake_result + + with patch("code_puppy.messaging.emit_warning") as mock_warn, patch( + "code_puppy.messaging.emit_system_message" + ) as mock_system: + result = await run_prompt_with_attachments( + fake_agent, + raw_prompt, + spinner_console=None, + ) + + assert result is fake_result + fake_agent.run_with_mcp.assert_awaited_once() + _, kwargs = fake_agent.run_with_mcp.await_args + assert kwargs["attachments"] + assert isinstance(kwargs["attachments"][0], BinaryContent) + assert kwargs["link_attachments"] == [] + mock_warn.assert_not_called() + mock_system.assert_called_once() + + +@pytest.mark.asyncio +async def test_run_prompt_with_attachments_uses_spinner(tmp_path: Path) -> None: + pdf_path = tmp_path / "paper.pdf" + pdf_path.write_bytes(b"%PDF") + + fake_agent = AsyncMock() + fake_agent.run_with_mcp.return_value = AsyncMock() + + dummy_console = object() + + with patch("code_puppy.messaging.spinner.ConsoleSpinner") as mock_spinner, patch( + "code_puppy.messaging.emit_system_message" + ), patch("code_puppy.messaging.emit_warning"): + await run_prompt_with_attachments( + fake_agent, + f"please summarise {pdf_path}", + spinner_console=dummy_console, + use_spinner=True, + ) + + mock_spinner.assert_called_once() + args, kwargs = mock_spinner.call_args + assert kwargs["console"] is dummy_console + + +@pytest.mark.asyncio +async def test_run_prompt_with_attachments_warns_on_blank_prompt() -> None: + fake_agent = AsyncMock() + + with patch("code_puppy.messaging.emit_warning") as mock_warn, patch( + "code_puppy.messaging.emit_system_message" + ): + result = await run_prompt_with_attachments( + fake_agent, + " ", + spinner_console=None, + use_spinner=False, + ) + + assert result is None + fake_agent.run_with_mcp.assert_not_called() + mock_warn.assert_called_once() + + +@pytest.mark.parametrize( + "raw", + [ + "https://example.com/file.pdf", + "https://example.com/image.png", + ], +) +def test_parse_prompt_does_not_parse_urls_anymore(raw: str) -> None: + processed = parse_prompt_attachments(raw) + + assert processed.prompt == raw + assert processed.link_attachments == [] diff --git a/tests/test_command_runner.py b/tests/test_command_runner.py deleted file mode 100644 index 5ca84a74..00000000 --- a/tests/test_command_runner.py +++ /dev/null @@ -1,56 +0,0 @@ -import subprocess -from unittest.mock import patch, MagicMock -from code_puppy.tools.command_runner import run_shell_command - - -def test_run_shell_command_timeout(): - with patch("subprocess.Popen") as mock_popen: - mock_process = mock_popen.return_value - - # When communicate is called with timeout param, raise TimeoutExpired - def communicate_side_effect(*args, **kwargs): - if "timeout" in kwargs: - raise subprocess.TimeoutExpired(cmd="dummy_command", timeout=1) - return ("", "") - - mock_process.communicate.side_effect = communicate_side_effect - mock_process.kill.side_effect = lambda: None - with patch("builtins.input", return_value="yes"): - result = run_shell_command(None, "dummy_command", timeout=1) - assert result.get("timeout") is True - assert "timed out" in result.get("error") - assert 
result.get("exit_code") is None - - -def test_run_shell_command_empty_command(): - result = run_shell_command(None, " ") - assert "error" in result - assert result["error"] == "Command cannot be empty" - - -def test_run_shell_command_success(): - mock_process = MagicMock() - mock_process.communicate.return_value = ("output", "") - mock_process.returncode = 0 - - with patch("subprocess.Popen", return_value=mock_process): - with patch("builtins.input", return_value="yes"): - result = run_shell_command(None, "echo test") - - assert result["exit_code"] == 0 - assert result["stdout"] == "output" - assert result["stderr"] == "" - - -def test_run_shell_command_error(): - mock_process = MagicMock() - mock_process.communicate.return_value = ("", "error") - mock_process.returncode = 1 - - with patch("subprocess.Popen", return_value=mock_process): - with patch("builtins.input", return_value="yes"): - result = run_shell_command(None, "badcmd") - - assert result["exit_code"] == 1 - assert result["stdout"] == "" - assert result["stderr"] == "error" diff --git a/tests/test_compaction_strategy.py b/tests/test_compaction_strategy.py new file mode 100644 index 00000000..6b19059e --- /dev/null +++ b/tests/test_compaction_strategy.py @@ -0,0 +1,112 @@ +import configparser +import os +import tempfile +from unittest.mock import patch + +from code_puppy.config import ( + CONFIG_DIR, + CONFIG_FILE, + DEFAULT_SECTION, + get_compaction_strategy, +) + + +def test_default_compaction_strategy(): + """Test that the default compaction strategy is truncation""" + with patch("code_puppy.config.get_value") as mock_get_value: + mock_get_value.return_value = None + strategy = get_compaction_strategy() + assert strategy == "truncation" + + +def test_set_compaction_strategy_truncation(): + """Test that we can set the compaction strategy to truncation""" + # Create a temporary config directory and file + with tempfile.TemporaryDirectory() as temp_dir: + original_config_dir = CONFIG_DIR + original_config_file = CONFIG_FILE + + # Monkey patch the config directory + import code_puppy.config + + code_puppy.config.CONFIG_DIR = temp_dir + code_puppy.config.CONFIG_FILE = os.path.join(temp_dir, "puppy.cfg") + + # Create the config file with truncation strategy + config = configparser.ConfigParser() + config[DEFAULT_SECTION] = {} + config[DEFAULT_SECTION]["compaction_strategy"] = "truncation" + + # Write the config + with open(code_puppy.config.CONFIG_FILE, "w") as f: + config.write(f) + + # Test that the strategy is read correctly + strategy = get_compaction_strategy() + assert strategy == "truncation" + + # Reset the config directory + code_puppy.config.CONFIG_DIR = original_config_dir + code_puppy.config.CONFIG_FILE = original_config_file + + +def test_set_compaction_strategy_summarization(): + """Test that we can set the compaction strategy to summarization""" + # Create a temporary config directory and file + with tempfile.TemporaryDirectory() as temp_dir: + original_config_dir = CONFIG_DIR + original_config_file = CONFIG_FILE + + # Monkey patch the config directory + import code_puppy.config + + code_puppy.config.CONFIG_DIR = temp_dir + code_puppy.config.CONFIG_FILE = os.path.join(temp_dir, "puppy.cfg") + + # Create the config file with summarization strategy + config = configparser.ConfigParser() + config[DEFAULT_SECTION] = {} + config[DEFAULT_SECTION]["compaction_strategy"] = "summarization" + + # Write the config + with open(code_puppy.config.CONFIG_FILE, "w") as f: + config.write(f) + + # Test that the strategy is read 
correctly + strategy = get_compaction_strategy() + assert strategy == "summarization" + + # Reset the config directory + code_puppy.config.CONFIG_DIR = original_config_dir + code_puppy.config.CONFIG_FILE = original_config_file + + +def test_set_compaction_strategy_invalid(): + """Test that an invalid compaction strategy defaults to truncation""" + # Create a temporary config directory and file + with tempfile.TemporaryDirectory() as temp_dir: + original_config_dir = CONFIG_DIR + original_config_file = CONFIG_FILE + + # Monkey patch the config directory + import code_puppy.config + + code_puppy.config.CONFIG_DIR = temp_dir + code_puppy.config.CONFIG_FILE = os.path.join(temp_dir, "puppy.cfg") + + # Create the config file with an invalid strategy + config = configparser.ConfigParser() + config[DEFAULT_SECTION] = {} + config[DEFAULT_SECTION]["compaction_strategy"] = "invalid_strategy" + + # Write the config + with open(code_puppy.config.CONFIG_FILE, "w") as f: + config.write(f) + + # Test that the strategy defaults to truncation + strategy = get_compaction_strategy() + assert strategy == "truncation" + + # Reset the config directory + code_puppy.config.CONFIG_DIR = original_config_dir + code_puppy.config.CONFIG_FILE = original_config_file diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 00000000..e4159d4c --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,684 @@ +import configparser +import os +from unittest.mock import MagicMock, mock_open, patch + +import pytest + +from code_puppy import config as cp_config + +# Define constants used in config.py to avoid direct import if they change +CONFIG_DIR_NAME = ".code_puppy" +CONFIG_FILE_NAME = "puppy.cfg" +DEFAULT_SECTION_NAME = "puppy" + + +@pytest.fixture +def mock_config_paths(monkeypatch): + # Ensure that tests don't interact with the actual user's config + mock_home = "/mock_home" + mock_config_dir = os.path.join(mock_home, CONFIG_DIR_NAME) + mock_config_file = os.path.join(mock_config_dir, CONFIG_FILE_NAME) + + monkeypatch.setattr(cp_config, "CONFIG_DIR", mock_config_dir) + monkeypatch.setattr(cp_config, "CONFIG_FILE", mock_config_file) + monkeypatch.setattr( + os.path, + "expanduser", + lambda path: mock_home if path == "~" else os.path.expanduser(path), + ) + return mock_config_dir, mock_config_file + + +class TestEnsureConfigExists: + def test_no_config_dir_or_file_prompts_and_creates( + self, mock_config_paths, monkeypatch + ): + mock_cfg_dir, mock_cfg_file = mock_config_paths + + mock_os_path_exists = MagicMock() + # First call for CONFIG_DIR, second for CONFIG_FILE (though isfile is used for file) + mock_os_path_exists.side_effect = [ + False, + False, + ] # CONFIG_DIR not exists, CONFIG_FILE not exists + monkeypatch.setattr(os.path, "exists", mock_os_path_exists) + + mock_os_path_isfile = MagicMock(return_value=False) # CONFIG_FILE not exists + monkeypatch.setattr(os.path, "isfile", mock_os_path_isfile) + + mock_makedirs = MagicMock() + monkeypatch.setattr(os, "makedirs", mock_makedirs) + + mock_input_values = { + "What should we name the puppy? ": "TestPuppy", + "What's your name (so Code Puppy knows its owner)? 
": "TestOwner", + } + mock_input = MagicMock(side_effect=lambda prompt: mock_input_values[prompt]) + monkeypatch.setattr("builtins.input", mock_input) + + m_open = mock_open() + with patch("builtins.open", m_open): + config_parser = cp_config.ensure_config_exists() + + mock_makedirs.assert_called_once_with(mock_cfg_dir, exist_ok=True) + m_open.assert_called_once_with(mock_cfg_file, "w") + + # Check what was written to file + # The configparser object's write method is called with a file-like object + # We can inspect the calls to that file-like object (m_open()) + # However, it's easier to check the returned config_parser object + assert config_parser.sections() == [DEFAULT_SECTION_NAME] + assert config_parser.get(DEFAULT_SECTION_NAME, "puppy_name") == "TestPuppy" + assert config_parser.get(DEFAULT_SECTION_NAME, "owner_name") == "TestOwner" + + def test_config_dir_exists_file_does_not_prompts_and_creates( + self, mock_config_paths, monkeypatch + ): + mock_cfg_dir, mock_cfg_file = mock_config_paths + + mock_os_path_exists = MagicMock(return_value=True) # CONFIG_DIR exists + monkeypatch.setattr(os.path, "exists", mock_os_path_exists) + + mock_os_path_isfile = MagicMock(return_value=False) # CONFIG_FILE not exists + monkeypatch.setattr(os.path, "isfile", mock_os_path_isfile) + + mock_makedirs = MagicMock() + monkeypatch.setattr(os, "makedirs", mock_makedirs) + + mock_input_values = { + "What should we name the puppy? ": "DirExistsPuppy", + "What's your name (so Code Puppy knows its owner)? ": "DirExistsOwner", + } + mock_input = MagicMock(side_effect=lambda prompt: mock_input_values[prompt]) + monkeypatch.setattr("builtins.input", mock_input) + + m_open = mock_open() + with patch("builtins.open", m_open): + config_parser = cp_config.ensure_config_exists() + + mock_makedirs.assert_not_called() # Dir already exists + m_open.assert_called_once_with(mock_cfg_file, "w") + + assert config_parser.sections() == [DEFAULT_SECTION_NAME] + assert config_parser.get(DEFAULT_SECTION_NAME, "puppy_name") == "DirExistsPuppy" + assert config_parser.get(DEFAULT_SECTION_NAME, "owner_name") == "DirExistsOwner" + + def test_config_file_exists_and_complete_no_prompt_no_write( + self, mock_config_paths, monkeypatch + ): + mock_cfg_dir, mock_cfg_file = mock_config_paths + + monkeypatch.setattr( + os.path, "exists", MagicMock(return_value=True) + ) # CONFIG_DIR exists + monkeypatch.setattr( + os.path, "isfile", MagicMock(return_value=True) + ) # CONFIG_FILE exists + + # Mock configparser.ConfigParser instance and its methods + mock_config_instance = configparser.ConfigParser() + mock_config_instance[DEFAULT_SECTION_NAME] = { + "puppy_name": "ExistingPuppy", + "owner_name": "ExistingOwner", + } + + def mock_read(file_path): + # Simulate reading by populating the mock_config_instance if it were empty + # For this test, we assume it's already populated as if read from file + pass + + mock_cp = MagicMock(return_value=mock_config_instance) + mock_config_instance.read = MagicMock(side_effect=mock_read) + monkeypatch.setattr(configparser, "ConfigParser", mock_cp) + + mock_input = MagicMock() + monkeypatch.setattr("builtins.input", mock_input) + + m_open = mock_open() + with patch("builtins.open", m_open): + returned_config_parser = cp_config.ensure_config_exists() + + mock_input.assert_not_called() + m_open.assert_not_called() # No write should occur + mock_config_instance.read.assert_called_once_with(mock_cfg_file) + + assert returned_config_parser == mock_config_instance + assert ( + 
returned_config_parser.get(DEFAULT_SECTION_NAME, "puppy_name") + == "ExistingPuppy" + ) + + def test_config_file_exists_missing_one_key_prompts_and_writes( + self, mock_config_paths, monkeypatch + ): + mock_cfg_dir, mock_cfg_file = mock_config_paths + + monkeypatch.setattr(os.path, "exists", MagicMock(return_value=True)) + monkeypatch.setattr(os.path, "isfile", MagicMock(return_value=True)) + + mock_config_instance = configparser.ConfigParser() + mock_config_instance[DEFAULT_SECTION_NAME] = { + "puppy_name": "PartialPuppy" + } # owner_name is missing + + def mock_read(file_path): + pass + + mock_cp = MagicMock(return_value=mock_config_instance) + mock_config_instance.read = MagicMock(side_effect=mock_read) + monkeypatch.setattr(configparser, "ConfigParser", mock_cp) + + mock_input_values = { + "What's your name (so Code Puppy knows its owner)? ": "PartialOwnerFilled" + } + # Only owner_name should be prompted + mock_input = MagicMock(side_effect=lambda prompt: mock_input_values[prompt]) + monkeypatch.setattr("builtins.input", mock_input) + + m_open = mock_open() + with patch("builtins.open", m_open): + returned_config_parser = cp_config.ensure_config_exists() + + mock_input.assert_called_once() # Only called for the missing key + m_open.assert_called_once_with(mock_cfg_file, "w") + mock_config_instance.read.assert_called_once_with(mock_cfg_file) + + assert ( + returned_config_parser.get(DEFAULT_SECTION_NAME, "puppy_name") + == "PartialPuppy" + ) + assert ( + returned_config_parser.get(DEFAULT_SECTION_NAME, "owner_name") + == "PartialOwnerFilled" + ) + + +class TestGetValue: + @patch("configparser.ConfigParser") + def test_get_value_exists(self, mock_config_parser_class, mock_config_paths): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + mock_parser_instance.get.return_value = "test_value" + mock_config_parser_class.return_value = mock_parser_instance + + val = cp_config.get_value("test_key") + + mock_config_parser_class.assert_called_once() + mock_parser_instance.read.assert_called_once_with(mock_cfg_file) + mock_parser_instance.get.assert_called_once_with( + DEFAULT_SECTION_NAME, "test_key", fallback=None + ) + assert val == "test_value" + + @patch("configparser.ConfigParser") + def test_get_value_not_exists(self, mock_config_parser_class, mock_config_paths): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + mock_parser_instance.get.return_value = None # Simulate key not found + mock_config_parser_class.return_value = mock_parser_instance + + val = cp_config.get_value("missing_key") + + assert val is None + + @patch("configparser.ConfigParser") + def test_get_value_config_file_not_exists_graceful( + self, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + mock_parser_instance.get.return_value = None + mock_config_parser_class.return_value = mock_parser_instance + + val = cp_config.get_value("any_key") + assert val is None + + +class TestSimpleGetters: + @patch("code_puppy.config.get_value") + def test_get_puppy_name_exists(self, mock_get_value): + mock_get_value.return_value = "MyPuppy" + assert cp_config.get_puppy_name() == "MyPuppy" + mock_get_value.assert_called_once_with("puppy_name") + + @patch("code_puppy.config.get_value") + def test_get_puppy_name_not_exists_uses_default(self, mock_get_value): + mock_get_value.return_value = None + assert cp_config.get_puppy_name() == "Puppy" # Default value + mock_get_value.assert_called_once_with("puppy_name") + + 
@patch("code_puppy.config.get_value") + def test_get_owner_name_exists(self, mock_get_value): + mock_get_value.return_value = "MyOwner" + assert cp_config.get_owner_name() == "MyOwner" + mock_get_value.assert_called_once_with("owner_name") + + @patch("code_puppy.config.get_value") + def test_get_owner_name_not_exists_uses_default(self, mock_get_value): + mock_get_value.return_value = None + assert cp_config.get_owner_name() == "Master" # Default value + mock_get_value.assert_called_once_with("owner_name") + + +class TestGetConfigKeys: + @patch("configparser.ConfigParser") + def test_get_config_keys_with_existing_keys( + self, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + + section_proxy = {"key1": "val1", "key2": "val2"} + mock_parser_instance.__contains__.return_value = True + mock_parser_instance.__getitem__.return_value = section_proxy + mock_config_parser_class.return_value = mock_parser_instance + + keys = cp_config.get_config_keys() + + mock_parser_instance.read.assert_called_once_with(mock_cfg_file) + assert keys == sorted( + [ + "allow_recursion", + "auto_save_session", + "compaction_strategy", + "compaction_threshold", + "key1", + "key2", + "max_saved_sessions", + "message_limit", + "model", + "openai_reasoning_effort", + "protected_token_count", + "yolo_mode", + ] + ) + + @patch("configparser.ConfigParser") + def test_get_config_keys_empty_config( + self, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + mock_parser_instance.__contains__.return_value = False + mock_config_parser_class.return_value = mock_parser_instance + + keys = cp_config.get_config_keys() + assert keys == sorted( + [ + "allow_recursion", + "auto_save_session", + "compaction_strategy", + "compaction_threshold", + "max_saved_sessions", + "message_limit", + "model", + "openai_reasoning_effort", + "protected_token_count", + "yolo_mode", + ] + ) + + +class TestSetConfigValue: + @patch("configparser.ConfigParser") + @patch("builtins.open", new_callable=mock_open) + def test_set_config_value_new_key_section_exists( + self, mock_file_open, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + + section_dict = {} + mock_parser_instance.read.return_value = [mock_cfg_file] + mock_parser_instance.__contains__.return_value = True + mock_parser_instance.__getitem__.return_value = section_dict + mock_config_parser_class.return_value = mock_parser_instance + + cp_config.set_config_value("a_new_key", "a_new_value") + + assert section_dict["a_new_key"] == "a_new_value" + mock_file_open.assert_called_once_with(mock_cfg_file, "w") + mock_parser_instance.write.assert_called_once_with(mock_file_open()) + + @patch("configparser.ConfigParser") + @patch("builtins.open", new_callable=mock_open) + def test_set_config_value_update_existing_key( + self, mock_file_open, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + + section_dict = {"existing_key": "old_value"} + mock_parser_instance.read.return_value = [mock_cfg_file] + mock_parser_instance.__contains__.return_value = True + mock_parser_instance.__getitem__.return_value = section_dict + mock_config_parser_class.return_value = mock_parser_instance + + cp_config.set_config_value("existing_key", "updated_value") + + assert section_dict["existing_key"] == "updated_value" + 
mock_file_open.assert_called_once_with(mock_cfg_file, "w") + mock_parser_instance.write.assert_called_once_with(mock_file_open()) + + @patch("configparser.ConfigParser") + @patch("builtins.open", new_callable=mock_open) + def test_set_config_value_section_does_not_exist_creates_it( + self, mock_file_open, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + + created_sections_store = {} + + def mock_contains_check(section_name): + return section_name in created_sections_store + + def mock_setitem_for_section_creation(section_name, value_usually_empty_dict): + created_sections_store[section_name] = value_usually_empty_dict + + def mock_getitem_for_section_access(section_name): + return created_sections_store[section_name] + + mock_parser_instance.read.return_value = [mock_cfg_file] + mock_parser_instance.__contains__.side_effect = mock_contains_check + mock_parser_instance.__setitem__.side_effect = mock_setitem_for_section_creation + mock_parser_instance.__getitem__.side_effect = mock_getitem_for_section_access + + mock_config_parser_class.return_value = mock_parser_instance + + cp_config.set_config_value("key_in_new_section", "value_in_new_section") + + assert DEFAULT_SECTION_NAME in created_sections_store + assert ( + created_sections_store[DEFAULT_SECTION_NAME]["key_in_new_section"] + == "value_in_new_section" + ) + + mock_file_open.assert_called_once_with(mock_cfg_file, "w") + mock_parser_instance.write.assert_called_once_with(mock_file_open()) + + +class TestModelName: + @patch("code_puppy.config.get_value") + @patch("code_puppy.config._validate_model_exists") + def test_get_model_name_exists(self, mock_validate_model_exists, mock_get_value): + mock_get_value.return_value = "test_model_from_config" + mock_validate_model_exists.return_value = True + assert cp_config.get_global_model_name() == "test_model_from_config" + mock_get_value.assert_called_once_with("model") + mock_validate_model_exists.assert_called_once_with("test_model_from_config") + + @patch("configparser.ConfigParser") + @patch("builtins.open", new_callable=mock_open) + def test_set_model_name( + self, mock_file_open, mock_config_parser_class, mock_config_paths + ): + _, mock_cfg_file = mock_config_paths + mock_parser_instance = MagicMock() + + section_dict = {} + # This setup ensures that config[DEFAULT_SECTION_NAME] operations work on section_dict + # and that the section is considered to exist or is created as needed. + mock_parser_instance.read.return_value = [mock_cfg_file] + + # Simulate that the section exists or will be created and then available + def get_section_or_create(name): + if name == DEFAULT_SECTION_NAME: + # Ensure subsequent checks for section existence pass + mock_parser_instance.__contains__ = ( + lambda s_name: s_name == DEFAULT_SECTION_NAME + ) + return section_dict + raise KeyError(name) + + mock_parser_instance.__getitem__.side_effect = get_section_or_create + # Initial check for section existence (might be False if section needs creation) + # We'll simplify by assuming it's True after first access or creation attempt. 
+ _section_exists_initially = False + + def initial_contains_check(s_name): + nonlocal _section_exists_initially + if s_name == DEFAULT_SECTION_NAME: + if _section_exists_initially: + return True + _section_exists_initially = ( + True # Simulate it's created on first miss then setitem + ) + return False + return False + + mock_parser_instance.__contains__.side_effect = initial_contains_check + + def mock_setitem_for_section(name, value): + if name == DEFAULT_SECTION_NAME: # For config[DEFAULT_SECTION_NAME] = {} + pass # section_dict is already our target via __getitem__ side_effect + else: # For config[DEFAULT_SECTION_NAME][key] = value + section_dict[name] = value + + mock_parser_instance.__setitem__.side_effect = mock_setitem_for_section + mock_config_parser_class.return_value = mock_parser_instance + + cp_config.set_model_name("super_model_7000") + + assert section_dict["model"] == "super_model_7000" + mock_file_open.assert_called_once_with(mock_cfg_file, "w") + mock_parser_instance.write.assert_called_once_with(mock_file_open()) + + +class TestGetYoloMode: + @patch("code_puppy.config.get_value") + def test_get_yolo_mode_from_config_true(self, mock_get_value): + true_values = ["true", "1", "YES", "ON"] + for val in true_values: + mock_get_value.reset_mock() + mock_get_value.return_value = val + assert cp_config.get_yolo_mode() is True, f"Failed for config value: {val}" + mock_get_value.assert_called_once_with("yolo_mode") + + @patch("code_puppy.config.get_value") + def test_get_yolo_mode_not_in_config_defaults_true(self, mock_get_value): + mock_get_value.return_value = None + + assert cp_config.get_yolo_mode() is True + mock_get_value.assert_called_once_with("yolo_mode") + + +class TestCommandHistory: + @patch("os.path.isfile") + @patch("pathlib.Path.touch") + @patch("os.path.expanduser") + def test_initialize_command_history_file_creates_new_file( + self, mock_expanduser, mock_touch, mock_isfile, mock_config_paths + ): + # Setup + mock_cfg_dir, _ = mock_config_paths + # First call is for COMMAND_HISTORY_FILE, second is for old history file + mock_isfile.side_effect = [False, False] # Both files don't exist + mock_expanduser.return_value = "/mock_home" + + # Call the function + cp_config.initialize_command_history_file() + + # Assert + assert mock_isfile.call_count == 2 + assert mock_isfile.call_args_list[0][0][0] == cp_config.COMMAND_HISTORY_FILE + mock_touch.assert_called_once() + + @patch("os.path.isfile") + @patch("pathlib.Path.touch") + @patch("os.path.expanduser") + @patch("shutil.copy2") + @patch("pathlib.Path.unlink") + def test_initialize_command_history_file_migrates_old_file( + self, + mock_unlink, + mock_copy2, + mock_expanduser, + mock_touch, + mock_isfile, + mock_config_paths, + ): + # Setup + mock_cfg_dir, _ = mock_config_paths + # First call checks if COMMAND_HISTORY_FILE exists, second call checks if old history file exists + mock_isfile.side_effect = [False, True] + mock_expanduser.return_value = "/mock_home" + + # Call the function + cp_config.initialize_command_history_file() + + # Assert + assert mock_isfile.call_count == 2 + mock_touch.assert_called_once() + mock_copy2.assert_called_once() + mock_unlink.assert_called_once() + + @patch("os.path.isfile") + def test_initialize_command_history_file_file_exists( + self, mock_isfile, mock_config_paths + ): + # Setup + mock_isfile.return_value = True # File already exists + + # Call the function + cp_config.initialize_command_history_file() + + # Assert + mock_isfile.assert_called_once_with(cp_config.COMMAND_HISTORY_FILE) 
+ # No other function should be called since file exists + + @patch("builtins.open", new_callable=mock_open) + @patch("datetime.datetime") + def test_save_command_to_history_with_timestamp( + self, mock_datetime, mock_file, mock_config_paths + ): + # Setup + mock_cfg_dir, mock_cfg_file = mock_config_paths + mock_now = MagicMock() + mock_now.isoformat.return_value = "2023-01-01T12:34:56" + mock_datetime.now.return_value = mock_now + + # Call the function + cp_config.save_command_to_history("test command") + + # Assert + mock_file.assert_called_once_with(cp_config.COMMAND_HISTORY_FILE, "a") + mock_file().write.assert_called_once_with( + "\n# 2023-01-01T12:34:56\ntest command\n" + ) + mock_now.isoformat.assert_called_once_with(timespec="seconds") + + @patch("builtins.open") + @patch("rich.console.Console") + def test_save_command_to_history_handles_error( + self, mock_console_class, mock_file, mock_config_paths + ): + # Setup + mock_file.side_effect = Exception("Test error") + mock_console_instance = MagicMock() + mock_console_class.return_value = mock_console_instance + + # Call the function + cp_config.save_command_to_history("test command") + + # Assert + mock_console_instance.print.assert_called_once() + + +class TestDefaultModelSelection: + def setup_method(self): + # Clear the cache before each test to ensure consistent behavior + cp_config.clear_model_cache() + + @patch("code_puppy.config.get_value") + @patch("code_puppy.config._validate_model_exists") + @patch("code_puppy.config._default_model_from_models_json") + def test_get_model_name_no_stored_model( + self, mock_default_model, mock_validate_model_exists, mock_get_value + ): + # When no model is stored in config, get_model_name should return the default model + mock_get_value.return_value = None + mock_default_model.return_value = "gpt-5" + + result = cp_config.get_global_model_name() + + assert result == "gpt-5" + mock_get_value.assert_called_once_with("model") + mock_validate_model_exists.assert_not_called() + mock_default_model.assert_called_once() + + @patch("code_puppy.config.get_value") + @patch("code_puppy.config._validate_model_exists") + @patch("code_puppy.config._default_model_from_models_json") + def test_get_model_name_invalid_model( + self, mock_default_model, mock_validate_model_exists, mock_get_value + ): + # When stored model doesn't exist in models.json, should return default model + mock_get_value.return_value = "invalid-model" + mock_validate_model_exists.return_value = False + mock_default_model.return_value = "gpt-5" + + result = cp_config.get_global_model_name() + + assert result == "gpt-5" + mock_get_value.assert_called_once_with("model") + mock_validate_model_exists.assert_called_once_with("invalid-model") + mock_default_model.assert_called_once() + + @patch("code_puppy.model_factory.ModelFactory.load_config") + def test_default_model_from_models_json_with_valid_config(self, mock_load_config): + # Test that the first model from models.json is selected when config is valid + mock_load_config.return_value = { + "test-model-1": {"type": "openai", "name": "test-model-1"}, + "test-model-2": {"type": "anthropic", "name": "test-model-2"}, + "test-model-3": {"type": "gemini", "name": "test-model-3"}, + } + + result = cp_config._default_model_from_models_json() + + assert result == "test-model-1" + mock_load_config.assert_called_once() + + @patch("code_puppy.model_factory.ModelFactory.load_config") + def test_default_model_from_models_json_empty_config(self, mock_load_config): + # Test that gpt-5 is returned when 
models.json is empty + mock_load_config.return_value = {} + + result = cp_config._default_model_from_models_json() + + assert result == "gpt-5" + mock_load_config.assert_called_once() + + @patch("code_puppy.model_factory.ModelFactory.load_config") + def test_default_model_from_models_json_exception_handling(self, mock_load_config): + # Test that gpt-5 is returned when there's an exception loading models.json + mock_load_config.side_effect = Exception("Config load failed") + + result = cp_config._default_model_from_models_json() + + assert result == "gpt-5" + mock_load_config.assert_called_once() + + def test_default_model_from_models_json_actual_file(self): + # Test that the actual first model from models.json is returned + # This test uses the real models.json file to verify correct behavior + result = cp_config._default_model_from_models_json() + + # The first model in models.json should be selected + assert result == "gpt-5" + + @patch("code_puppy.config.get_value") + def test_get_model_name_with_nonexistent_model_uses_first_from_models_json( + self, mock_get_value + ): + # Test the exact scenario: when a model doesn't exist in the config, + # the first model from models.json is selected + mock_get_value.return_value = "non-existent-model" + + # This will use the real models.json file through the ModelFactory + result = cp_config.get_global_model_name() + + # Since "non-existent-model" doesn't exist in models.json, + # it should fall back to the first model in models.json ("gpt-5") + assert result == "gpt-5" + mock_get_value.assert_called_once_with("model") diff --git a/tests/test_console_ui_paths.py b/tests/test_console_ui_paths.py deleted file mode 100644 index 3531cc7d..00000000 --- a/tests/test_console_ui_paths.py +++ /dev/null @@ -1,32 +0,0 @@ -from code_puppy.tools.command_runner import share_your_reasoning -from code_puppy.tools.file_operations import list_files -from unittest.mock import patch - -# This test calls share_your_reasoning with reasoning only - - -def test_share_your_reasoning_plain(): - out = share_your_reasoning({}, reasoning="I reason with gusto!") - assert out["success"] - - -# This triggers tree output for multi-depth directories - - -def test_list_files_multi_level_tree(): - with ( - patch("os.path.abspath", return_value="/foo"), - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.walk") as mwalk, - patch( - "code_puppy.tools.file_operations.should_ignore_path", return_value=False - ), - patch("os.path.getsize", return_value=99), - ): - mwalk.return_value = [ - ("/foo", ["dir1"], ["a.py"]), - ("/foo/dir1", [], ["b.md", "c.txt"]), - ] - results = list_files(None, directory="/foo") - assert len(results) >= 3 # At least a.py, b.md, c.txt diff --git a/tests/test_delete_snippet_from_file.py b/tests/test_delete_snippet_from_file.py deleted file mode 100644 index 0042df92..00000000 --- a/tests/test_delete_snippet_from_file.py +++ /dev/null @@ -1,88 +0,0 @@ -from unittest.mock import patch, mock_open -from code_puppy.tools.file_modifications import delete_snippet_from_file - - -def test_delete_snippet_success(): - content = "This is foo text containing the SNIPPET to delete." 
- with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data=content)) as m, - ): - # Snippet to delete that is present in the content - snippet = "SNIPPET" - # Our write should have the snippet removed - result = delete_snippet_from_file(None, "dummy_path", snippet) - assert result.get("success") is True - assert snippet not in m().write.call_args[0][0] - - -def test_delete_snippet_file_not_found(): - with patch("os.path.exists", return_value=False): - res = delete_snippet_from_file(None, "dummy_path", "SNIPPET") - assert "error" in res - - -def test_delete_snippet_not_a_file(): - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=False), - ): - res = delete_snippet_from_file(None, "dummy_path", "FOO") - assert "error" in res - - -def test_delete_snippet_snippet_not_found(): - content = "no such snippet here" - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data=content)), - ): - res = delete_snippet_from_file(None, "dummy_path", "SNIPPET_NOT_THERE") - assert "error" in res - - -def test_delete_snippet_no_changes(): - # The same as 'snippet not found', it should early return - content = "no match" - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data=content)), - ): - res = delete_snippet_from_file(None, "dummy_path", "notfound") - # Should return error as per actual code - assert "error" in res - assert "Snippet not found" in res["error"] - - -def test_delete_snippet_permission_error(): - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", side_effect=PermissionError("DENIED")), - ): - res = delete_snippet_from_file(None, "dummy_path", "foo") - assert "error" in res - - -def test_delete_snippet_filenotfounderror(): - # Even though checked above, simulate FileNotFoundError anyway - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", side_effect=FileNotFoundError("NO FILE")), - ): - res = delete_snippet_from_file(None, "dummy_path", "foo") - assert "error" in res - - -def test_delete_snippet_fails_with_unknown_exception(): - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", side_effect=Exception("kaboom")), - ): - res = delete_snippet_from_file(None, "dummy_path", "foo") - assert "error" in res and "kaboom" in res["error"] diff --git a/tests/test_file_modification_auxiliary.py b/tests/test_file_modification_auxiliary.py new file mode 100644 index 00000000..7afe6319 --- /dev/null +++ b/tests/test_file_modification_auxiliary.py @@ -0,0 +1,76 @@ +from code_puppy.tools import file_modifications + + +def test_replace_in_file_multiple_replacements(tmp_path): + path = tmp_path / "multi.txt" + path.write_text("foo bar baz bar foo") + reps = [ + {"old_str": "bar", "new_str": "dog"}, + {"old_str": "foo", "new_str": "biscuit"}, + ] + res = file_modifications._replace_in_file(None, str(path), reps) + assert res["success"] + assert "dog" in path.read_text() and "biscuit" in path.read_text() + + +def test_replace_in_file_unicode(tmp_path): + path = tmp_path / "unicode.txt" + path.write_text("puppy 🐶 says meow") + reps = [{"old_str": "meow", "new_str": "woof"}] + res = 
file_modifications._replace_in_file(None, str(path), reps) + assert res["success"] + assert "woof" in path.read_text() + + +def test_replace_in_file_near_match(tmp_path): + path = tmp_path / "fuzzy.txt" + path.write_text("abc\ndef\nghijk") + # deliberately off by one for fuzzy test + reps = [{"old_str": "def\nghij", "new_str": "replaced"}] + res = file_modifications._replace_in_file(None, str(path), reps) + # Depending on scoring, this may or may not match: just test schema + assert "diff" in res + + +def test_delete_large_snippet(tmp_path): + path = tmp_path / "bigdelete.txt" + content = "hello" + " fluff" * 500 + " bye" + path.write_text(content) + snippet = " fluff" * 250 + res = file_modifications._delete_snippet_from_file(None, str(path), snippet) + # Could still succeed or fail depending on split, just check key presence + assert "diff" in res + + +def test_write_to_file_invalid_path(tmp_path): + # Directory as filename + d = tmp_path / "adir" + d.mkdir() + res = file_modifications._write_to_file(None, str(d), "puppy", overwrite=False) + assert "error" in res or not res.get("success") + + +def test_replace_in_file_invalid_json(tmp_path): + path = tmp_path / "bad.txt" + path.write_text("hi there!") + # malformed replacements - not a list + reps = "this is definitely not json dicts" + try: + res = file_modifications._replace_in_file(None, str(path), reps) + except Exception: + assert True + else: + assert isinstance(res, dict) + + +def test_write_to_file_binary_content(tmp_path): + path = tmp_path / "binfile" + bin_content = b"\x00\x01biscuit\x02" + # Should not raise, but can't always expect 'success' either: just presence + try: + res = file_modifications._write_to_file( + None, str(path), bin_content.decode(errors="ignore"), overwrite=False + ) + assert "success" in res or "error" in res + except Exception: + assert True diff --git a/tests/test_file_modifications.py b/tests/test_file_modifications.py deleted file mode 100644 index e45f5841..00000000 --- a/tests/test_file_modifications.py +++ /dev/null @@ -1,73 +0,0 @@ -import pytest - -from unittest.mock import patch, mock_open -from code_puppy.tools.file_modifications import modify_file - - -def test_modify_file_append(): - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data="Original content")) as mock_file, - ): - result = modify_file(None, "dummy_path", " New content", "Original content") - assert result.get("success") - assert "New content" in mock_file().write.call_args[0][0] - - -def test_modify_file_target_replace(): - original_content = "Original content" - target_content = "Original" - proposed_content = "Modified" - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data=original_content)) as mock_file, - ): - result = modify_file(None, "dummy_path", proposed_content, target_content) - assert result.get("success") - assert proposed_content in mock_file().write.call_args[0][0] - - -def test_modify_file_no_changes(): - original_content = "Original content" - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isfile", return_value=True), - patch("builtins.open", mock_open(read_data=original_content)), - ): - result = modify_file(None, "dummy_path", original_content, original_content) - assert not result.get("changed") - assert result.get("message") == "No changes to apply." 
- - -@pytest.mark.parametrize("file_exists", [True, False]) -def test_modify_file_file_not_exist(file_exists): - with patch("os.path.exists", return_value=file_exists): - if not file_exists: - result = modify_file(None, "dummy_path", "content", "content") - assert "error" in result - else: - with ( - patch("os.path.isfile", return_value=True), - patch( - "builtins.open", mock_open(read_data="Original content") - ) as mock_file, - ): - result = modify_file( - None, "dummy_path", " New content", "Original content" - ) - assert result.get("success") - assert "New content" in mock_file().write.call_args[0][0] - - -def test_modify_file_file_is_directory(): - from code_puppy.tools.file_modifications import modify_file - - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - ): - result = modify_file(None, "dummy_path", "some change", "some change") - assert "error" in result - assert result.get("changed") is None diff --git a/tests/test_file_operations.py b/tests/test_file_operations.py deleted file mode 100644 index 138758d6..00000000 --- a/tests/test_file_operations.py +++ /dev/null @@ -1,49 +0,0 @@ -from unittest.mock import patch, mock_open -from code_puppy.tools.file_operations import list_files, create_file, read_file - - -def test_create_file(): - test_file = "test_create.txt" - m = mock_open() - with ( - patch("os.path.exists") as mock_exists, - patch("builtins.open", m), - ): - mock_exists.return_value = False - result = create_file(None, test_file, "content") - assert "success" in result - assert result["success"] - assert result["path"].endswith(test_file) - - -def test_read_file(): - test_file = "test_read.txt" - m = mock_open(read_data="line1\nline2\nline3") - with ( - patch("os.path.exists") as mock_exists, - patch("os.path.isfile") as mock_isfile, - patch("builtins.open", m), - ): - mock_exists.return_value = True - mock_isfile.return_value = True - result = read_file(None, test_file) - assert "content" in result - - -def test_list_files_permission_error_on_getsize(tmp_path): - # Create a directory and pretend a file exists, but getsize fails - fake_dir = tmp_path - fake_file = fake_dir / "file.txt" - fake_file.write_text("hello") - with ( - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.walk", return_value=[(str(fake_dir), [], ["file.txt"])]), - patch( - "code_puppy.tools.file_operations.should_ignore_path", return_value=False - ), - patch("os.path.getsize", side_effect=PermissionError), - ): - result = list_files(None, directory=str(fake_dir)) - # Should not throw, just quietly ignore - assert all(f["type"] != "file" or f["path"] != "file.txt" for f in result) diff --git a/tests/test_file_operations_icons.py b/tests/test_file_operations_icons.py deleted file mode 100644 index 7297242f..00000000 --- a/tests/test_file_operations_icons.py +++ /dev/null @@ -1,37 +0,0 @@ -from code_puppy.tools.file_operations import list_files -from unittest.mock import patch - -all_types = [ - "main.py", - "frontend.js", - "component.tsx", - "layout.html", - "styles.css", - "README.md", - "config.yaml", - "image.png", - "music.mp3", - "movie.mp4", - "report.pdf", - "archive.zip", - "binary.exe", - "oddfile.unknown", -] - - -def test_list_files_get_file_icon_full_coverage(): - fake_entries = [("/repo", [], all_types)] - with ( - patch("os.path.abspath", return_value="/repo"), - patch("os.path.exists", return_value=True), - patch("os.path.isdir", return_value=True), - patch("os.walk", 
return_value=fake_entries), - patch( - "code_puppy.tools.file_operations.should_ignore_path", return_value=False - ), - patch("os.path.getsize", return_value=420), - ): - results = list_files(None, directory="/repo") - paths = set(f["path"] for f in results) - for p in all_types: - assert p in paths diff --git a/tests/test_json_agents.py b/tests/test_json_agents.py new file mode 100644 index 00000000..92baabb2 --- /dev/null +++ b/tests/test_json_agents.py @@ -0,0 +1,282 @@ +"""Tests for JSON agent functionality.""" + +import json +import os +import tempfile +from pathlib import Path +from unittest.mock import patch + +import pytest + +from code_puppy.agents.base_agent import BaseAgent +from code_puppy.agents.json_agent import JSONAgent, discover_json_agents +from code_puppy.config import get_user_agents_directory + + +class TestJSONAgent: + """Test JSON agent functionality.""" + + @pytest.fixture + def sample_json_config(self): + """Sample JSON agent configuration.""" + return { + "name": "test-agent", + "display_name": "Test Agent 🧪", + "description": "A test agent for unit testing", + "system_prompt": "You are a test agent.", + "tools": ["list_files", "read_file", "edit_file"], + "user_prompt": "Enter your test request:", + "tools_config": {"timeout": 30}, + } + + @pytest.fixture + def sample_json_config_with_list_prompt(self): + """Sample JSON agent configuration with list-based system prompt.""" + return { + "name": "list-prompt-agent", + "description": "Agent with list-based system prompt", + "system_prompt": [ + "You are a helpful assistant.", + "You help users with coding tasks.", + "Always be polite and professional.", + ], + "tools": ["list_files", "read_file"], + } + + @pytest.fixture + def temp_json_file(self, sample_json_config): + """Create a temporary JSON file with sample config.""" + with tempfile.NamedTemporaryFile( + mode="w", suffix="-agent.json", delete=False + ) as f: + json.dump(sample_json_config, f) + temp_path = f.name + + yield temp_path + + # Cleanup + if os.path.exists(temp_path): + os.unlink(temp_path) + + def test_json_agent_loading(self, temp_json_file): + """Test loading a JSON agent from file.""" + agent = JSONAgent(temp_json_file) + + assert agent.name == "test-agent" + assert agent.display_name == "Test Agent 🧪" + assert agent.description == "A test agent for unit testing" + assert agent.get_system_prompt() == "You are a test agent." 
+ assert agent.get_user_prompt() == "Enter your test request:" + assert agent.get_tools_config() == {"timeout": 30} + + def test_json_agent_with_list_prompt(self, sample_json_config_with_list_prompt): + """Test JSON agent with list-based system prompt.""" + with tempfile.NamedTemporaryFile( + mode="w", suffix="-agent.json", delete=False + ) as f: + json.dump(sample_json_config_with_list_prompt, f) + temp_path = f.name + + try: + agent = JSONAgent(temp_path) + + assert agent.name == "list-prompt-agent" + assert agent.display_name == "List-Prompt-Agent 🤖" # Fallback display name + + # List-based prompt should be joined with newlines + expected_prompt = "\n".join( + [ + "You are a helpful assistant.", + "You help users with coding tasks.", + "Always be polite and professional.", + ] + ) + assert agent.get_system_prompt() == expected_prompt + + finally: + if os.path.exists(temp_path): + os.unlink(temp_path) + + def test_json_agent_available_tools(self, temp_json_file): + """Test that JSON agent filters tools correctly.""" + agent = JSONAgent(temp_json_file) + tools = agent.get_available_tools() + + # Should only return tools that exist in our registry + # "final_result" from JSON should be filtered out + expected_tools = ["list_files", "read_file", "edit_file"] + assert tools == expected_tools + + def test_json_agent_inheritance(self, temp_json_file): + """Test that JSONAgent properly inherits from BaseAgent.""" + agent = JSONAgent(temp_json_file) + + assert isinstance(agent, BaseAgent) + assert hasattr(agent, "name") + assert hasattr(agent, "display_name") + assert hasattr(agent, "description") + assert callable(agent.get_system_prompt) + assert callable(agent.get_available_tools) + + def test_invalid_json_file(self): + """Test handling of invalid JSON files.""" + with tempfile.NamedTemporaryFile( + mode="w", suffix="-agent.json", delete=False + ) as f: + f.write("invalid json content") + temp_path = f.name + + try: + with pytest.raises(ValueError, match="Failed to load JSON agent config"): + JSONAgent(temp_path) + finally: + if os.path.exists(temp_path): + os.unlink(temp_path) + + def test_missing_required_fields(self): + """Test handling of JSON with missing required fields.""" + incomplete_config = { + "name": "incomplete-agent" + # Missing description, system_prompt, tools + } + + with tempfile.NamedTemporaryFile( + mode="w", suffix="-agent.json", delete=False + ) as f: + json.dump(incomplete_config, f) + temp_path = f.name + + try: + with pytest.raises(ValueError, match="Missing required field"): + JSONAgent(temp_path) + finally: + if os.path.exists(temp_path): + os.unlink(temp_path) + + def test_invalid_tools_field(self): + """Test handling of invalid tools field.""" + invalid_config = { + "name": "invalid-tools-agent", + "description": "Test agent", + "system_prompt": "Test prompt", + "tools": "not a list", # Should be a list + } + + with tempfile.NamedTemporaryFile( + mode="w", suffix="-agent.json", delete=False + ) as f: + json.dump(invalid_config, f) + temp_path = f.name + + try: + with pytest.raises(ValueError, match="'tools' must be a list"): + JSONAgent(temp_path) + finally: + if os.path.exists(temp_path): + os.unlink(temp_path) + + +class TestJSONAgentDiscovery: + """Test JSON agent discovery functionality.""" + + def test_discover_json_agents(self, monkeypatch): + """Test discovering JSON agents in the user directory.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Mock the agents directory to use our temp directory + monkeypatch.setattr( + 
"code_puppy.config.get_user_agents_directory", lambda: temp_dir + ) + + # Create valid JSON agent + agent1_config = { + "name": "agent1", + "description": "First agent", + "system_prompt": "Agent 1 prompt", + "tools": ["list_files"], + } + agent1_path = ( + Path(temp_dir) / "agent1.json" + ) # Changed from agent1-agent.json + with open(agent1_path, "w") as f: + json.dump(agent1_config, f) + + # Create another valid JSON agent + agent2_config = { + "name": "agent2", + "description": "Second agent", + "system_prompt": "Agent 2 prompt", + "tools": ["read_file"], + } + agent2_path = Path(temp_dir) / "custom-agent.json" + with open(agent2_path, "w") as f: + json.dump(agent2_config, f) + + # Create invalid JSON file (should be skipped) + invalid_path = ( + Path(temp_dir) / "invalid.json" + ) # Changed from invalid-agent.json + with open(invalid_path, "w") as f: + f.write("invalid json") + + # Create non-agent JSON file (should be skipped) + other_path = Path(temp_dir) / "other.json" + with open(other_path, "w") as f: + json.dump({"not": "an agent"}, f) + + # Discover agents + agents = discover_json_agents() + + # Should find only the two valid agents + assert len(agents) == 2 + assert "agent1" in agents + assert "agent2" in agents + assert agents["agent1"] == str(agent1_path) + assert agents["agent2"] == str(agent2_path) + + def test_discover_nonexistent_directory(self, monkeypatch): + """Test discovering agents when directory doesn't exist.""" + # Mock the agents directory to point to non-existent directory + monkeypatch.setattr( + "code_puppy.config.get_user_agents_directory", + lambda: "/nonexistent/directory", + ) + agents = discover_json_agents() + assert agents == {} + + def test_get_user_agents_directory(self): + """Test getting user agents directory.""" + user_dir = get_user_agents_directory() + + assert isinstance(user_dir, str) + assert ".code_puppy" in user_dir + assert "agents" in user_dir + + # Directory should be created + assert Path(user_dir).exists() + assert Path(user_dir).is_dir() + + def test_user_agents_directory_windows(self, monkeypatch): + """Test user agents directory cross-platform consistency.""" + mock_agents_dir = "/fake/home/.code_puppy/agents" + + # Override the AGENTS_DIR constant directly + monkeypatch.setattr("code_puppy.config.AGENTS_DIR", mock_agents_dir) + + with patch("code_puppy.config.os.makedirs") as mock_makedirs: + user_dir = get_user_agents_directory() + + assert user_dir == mock_agents_dir + mock_makedirs.assert_called_once_with(mock_agents_dir, exist_ok=True) + + def test_user_agents_directory_macos(self, monkeypatch): + """Test user agents directory on macOS.""" + mock_agents_dir = "/fake/home/.code_puppy/agents" + + # Override the AGENTS_DIR constant directly + monkeypatch.setattr("code_puppy.config.AGENTS_DIR", mock_agents_dir) + + with patch("code_puppy.config.os.makedirs") as mock_makedirs: + user_dir = get_user_agents_directory() + + assert user_dir == mock_agents_dir + mock_makedirs.assert_called_once_with(mock_agents_dir, exist_ok=True) diff --git a/tests/test_load_context_completion.py b/tests/test_load_context_completion.py new file mode 100644 index 00000000..54ce0cee --- /dev/null +++ b/tests/test_load_context_completion.py @@ -0,0 +1,126 @@ +import tempfile +from pathlib import Path +from unittest.mock import patch + +from prompt_toolkit.document import Document + +from code_puppy.command_line.load_context_completion import LoadContextCompleter + + +class TestLoadContextCompleter: + def setup_method(self): + self.completer = 
LoadContextCompleter() + + def test_trigger_detection(self): + """Test that the completer only activates for /load_context commands.""" + # Should activate + doc = Document("/load_context") + completions = list(self.completer.get_completions(doc, None)) + assert len(completions) >= 0 # At least doesn't crash + + # Should not activate + doc = Document("/other_command") + completions = list(self.completer.get_completions(doc, None)) + assert len(completions) == 0 + + doc = Document("regular text") + completions = list(self.completer.get_completions(doc, None)) + assert len(completions) == 0 + + def test_space_completion(self): + """Test that typing just /load_context suggests adding a space.""" + doc = Document("/load_context") + completions = list(self.completer.get_completions(doc, None)) + + assert len(completions) == 1 + assert completions[0].text == "/load_context " + # display_meta might be a FormattedText object, so convert to string + display_meta = str(completions[0].display_meta) + assert "load saved context" in display_meta + + def test_session_name_completion(self): + """Test that available session files are suggested for completion.""" + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "code_puppy.command_line.load_context_completion.CONFIG_DIR", temp_dir + ): + # Create contexts directory with some test files + contexts_dir = Path(temp_dir) / "contexts" + contexts_dir.mkdir() + + # Create test context files + (contexts_dir / "session1.pkl").touch() + (contexts_dir / "session2.pkl").touch() + (contexts_dir / "another_session.pkl").touch() + (contexts_dir / "not_a_pkl.txt").touch() # Should be ignored + + # Test completion with space + doc = Document("/load_context ") + completions = list(self.completer.get_completions(doc, None)) + + # Should suggest all .pkl files (without extension) + completion_texts = [c.text for c in completions] + assert "session1" in completion_texts + assert "session2" in completion_texts + assert "another_session" in completion_texts + assert "not_a_pkl" not in completion_texts # .txt files ignored + + # All should have proper metadata + for completion in completions: + display_meta = str(completion.display_meta) + assert "saved context session" in display_meta + + def test_partial_session_name_completion(self): + """Test that partial session names are filtered correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "code_puppy.command_line.load_context_completion.CONFIG_DIR", temp_dir + ): + # Create contexts directory with some test files + contexts_dir = Path(temp_dir) / "contexts" + contexts_dir.mkdir() + + # Create test context files + (contexts_dir / "session1.pkl").touch() + (contexts_dir / "session2.pkl").touch() + (contexts_dir / "another_session.pkl").touch() + + # Test completion with partial match + doc = Document("/load_context sess") + completions = list(self.completer.get_completions(doc, None)) + + # Should only suggest files starting with "sess" + completion_texts = [c.text for c in completions] + assert "session1" in completion_texts + assert "session2" in completion_texts + assert ( + "another_session" not in completion_texts + ) # Doesn't start with "sess" + + def test_no_contexts_directory(self): + """Test behavior when contexts directory doesn't exist.""" + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "code_puppy.command_line.load_context_completion.CONFIG_DIR", temp_dir + ): + # Don't create contexts directory + + # Test completion - should not crash + doc = 
Document("/load_context ") + completions = list(self.completer.get_completions(doc, None)) + + # Should return empty list, not crash + assert completions == [] + + def test_whitespace_handling(self): + """Test that leading whitespace is handled correctly.""" + # Test with leading spaces + doc = Document(" /load_context") + completions = list(self.completer.get_completions(doc, None)) + assert len(completions) == 1 + assert completions[0].text == "/load_context " + + # Test with tabs + doc = Document("\t/load_context ") + completions = list(self.completer.get_completions(doc, None)) + assert len(completions) >= 0 # At least doesn't crash diff --git a/tests/test_model_factory.py b/tests/test_model_factory.py new file mode 100644 index 00000000..c1886ca0 --- /dev/null +++ b/tests/test_model_factory.py @@ -0,0 +1,229 @@ +import os + +import pytest + +from code_puppy.model_factory import ModelFactory + +TEST_CONFIG_PATH = os.path.join(os.path.dirname(__file__), "../code_puppy/models.json") + + +def test_ollama_load_model(): + config = ModelFactory.load_config() + + # Skip test if 'ollama-llama2' model is not in config + if "ollama-llama2" not in config: + pytest.skip("Model 'ollama-llama2' not found in configuration, skipping test.") + + model = ModelFactory.get_model("ollama-llama2", config) + assert hasattr(model, "provider") + assert model.provider.model_name == "llama2" + assert "chat" in dir(model), "OllamaModel must have a .chat method!" + + +def test_anthropic_load_model(): + config = ModelFactory.load_config() + if "anthropic-test" not in config: + pytest.skip("Model 'anthropic-test' not found in configuration, skipping test.") + if not os.environ.get("ANTHROPIC_API_KEY"): + pytest.skip("ANTHROPIC_API_KEY not set in environment, skipping test.") + + model = ModelFactory.get_model("anthropic-test", config) + assert hasattr(model, "provider") + assert hasattr(model.provider, "anthropic_client") + # Note: Do not make actual Anthropic network calls in CI, just validate instantiation. 
+ + +def test_missing_model(): + config = {"foo": {"type": "openai", "name": "bar"}} + with pytest.raises(ValueError): + ModelFactory.get_model("not-there", config) + + +def test_unsupported_type(): + config = {"bad": {"type": "doesnotexist", "name": "fake"}} + with pytest.raises(ValueError): + ModelFactory.get_model("bad", config) + + +def test_env_var_reference_azure(monkeypatch): + monkeypatch.setenv("AZ_URL", "https://mock-endpoint.openai.azure.com") + monkeypatch.setenv("AZ_VERSION", "2023-05-15") + monkeypatch.setenv("AZ_KEY", "supersecretkey") + config = { + "azmodel": { + "type": "azure_openai", + "name": "az", + "azure_endpoint": "$AZ_URL", + "api_version": "$AZ_VERSION", + "api_key": "$AZ_KEY", + } + } + model = ModelFactory.get_model("azmodel", config) + assert model.client is not None + + +def test_custom_endpoint_missing_url(): + config = { + "custom": { + "type": "custom_openai", + "name": "mycust", + "custom_endpoint": {"headers": {}}, + } + } + with pytest.raises(ValueError): + ModelFactory.get_model("custom", config) + + +# Additional tests for coverage +def test_get_custom_config_missing_custom_endpoint(): + from code_puppy.model_factory import get_custom_config + + with pytest.raises(ValueError): + get_custom_config({}) + + +def test_get_custom_config_missing_url(): + from code_puppy.model_factory import get_custom_config + + config = {"custom_endpoint": {"headers": {}}} + with pytest.raises(ValueError): + get_custom_config(config) + + +def test_gemini_load_model(monkeypatch): + monkeypatch.setenv("GEMINI_API_KEY", "dummy-value") + config = {"gemini": {"type": "gemini", "name": "gemini-pro"}} + model = ModelFactory.get_model("gemini", config) + assert model is not None + assert hasattr(model, "provider") + + +def test_openai_load_model(monkeypatch): + monkeypatch.setenv("OPENAI_API_KEY", "fake-key") + config = {"openai": {"type": "openai", "name": "fake-openai-model"}} + model = ModelFactory.get_model("openai", config) + assert model is not None + assert hasattr(model, "provider") + + +def test_custom_openai_happy(monkeypatch): + monkeypatch.setenv("OPENAI_API_KEY", "ok") + config = { + "custom": { + "type": "custom_openai", + "name": "cust", + "custom_endpoint": { + "url": "https://fake.url", + "headers": {"X-Api-Key": "$OPENAI_API_KEY"}, + "ca_certs_path": False, + "api_key": "$OPENAI_API_KEY", + }, + } + } + model = ModelFactory.get_model("custom", config) + assert model is not None + assert hasattr(model.provider, "base_url") + + +def test_anthropic_missing_api_key(monkeypatch): + config = {"anthropic": {"type": "anthropic", "name": "claude-v2"}} + if "ANTHROPIC_API_KEY" in os.environ: + monkeypatch.delenv("ANTHROPIC_API_KEY") + with pytest.raises(ValueError): + ModelFactory.get_model("anthropic", config) + + +def test_azure_missing_endpoint(): + config = { + "az1": { + "type": "azure_openai", + "name": "az", + "api_version": "2023", + "api_key": "val", + } + } + with pytest.raises(ValueError): + ModelFactory.get_model("az1", config) + + +def test_azure_missing_apiversion(): + config = { + "az2": { + "type": "azure_openai", + "name": "az", + "azure_endpoint": "foo", + "api_key": "val", + } + } + with pytest.raises(ValueError): + ModelFactory.get_model("az2", config) + + +def test_azure_missing_apikey(): + config = { + "az3": { + "type": "azure_openai", + "name": "az", + "azure_endpoint": "foo", + "api_version": "1.0", + } + } + with pytest.raises(ValueError): + ModelFactory.get_model("az3", config) + + +def test_custom_anthropic_missing_url(): + config = { + "x": { + 
"type": "custom_anthropic", + "name": "ya", + "custom_endpoint": {"headers": {}}, + } + } + with pytest.raises(ValueError): + ModelFactory.get_model("x", config) + + +def test_extra_models_json_decode_error(tmp_path, monkeypatch): + # Create a temporary extra_models.json file with invalid JSON + extra_models_file = tmp_path / "extra_models.json" + extra_models_file.write_text("{ invalid json content }") + + # Patch the EXTRA_MODELS_FILE path to point to our temporary file + from code_puppy.model_factory import ModelFactory + + monkeypatch.setattr( + "code_puppy.model_factory.EXTRA_MODELS_FILE", str(extra_models_file) + ) + + # This should not raise an exception despite the invalid JSON + config = ModelFactory.load_config() + + # The config should still be loaded, just without the extra models + assert isinstance(config, dict) + assert len(config) > 0 + + +def test_extra_models_exception_handling(tmp_path, monkeypatch, caplog): + # Create a temporary extra_models.json file that will raise a general exception + extra_models_file = tmp_path / "extra_models.json" + # Create a directory with the same name to cause an OSError when trying to read it + extra_models_file.mkdir() + + # Patch the EXTRA_MODELS_FILE path + from code_puppy.model_factory import ModelFactory + + monkeypatch.setattr( + "code_puppy.model_factory.EXTRA_MODELS_FILE", str(extra_models_file) + ) + + # This should not raise an exception despite the error + with caplog.at_level("WARNING"): + config = ModelFactory.load_config() + + # The config should still be loaded + assert isinstance(config, dict) + assert len(config) > 0 + + # Check that warning was logged + assert "Failed to load extra models config" in caplog.text diff --git a/tests/test_prompt_toolkit_completion.py b/tests/test_prompt_toolkit_completion.py new file mode 100644 index 00000000..7c3a7b15 --- /dev/null +++ b/tests/test_prompt_toolkit_completion.py @@ -0,0 +1,638 @@ +import os +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from prompt_toolkit.document import Document +from prompt_toolkit.formatted_text import FormattedText +from prompt_toolkit.keys import Keys + +from prompt_toolkit.buffer import Buffer +from prompt_toolkit.layout.controls import BufferControl +from prompt_toolkit.layout.processors import TransformationInput + +from code_puppy.command_line.prompt_toolkit_completion import ( + AttachmentPlaceholderProcessor, + CDCompleter, + FilePathCompleter, + SetCompleter, + get_input_with_combined_completion, +) + + +def setup_files(tmp_path): + d = tmp_path / "dir" + d.mkdir() + (d / "file1.txt").write_text("content1") + (d / "file2.py").write_text("content2") + (tmp_path / "file3.txt").write_text("hi") + (tmp_path / ".hiddenfile").write_text("sneaky") + return d + + +def test_no_symbol(tmp_path): + completer = FilePathCompleter(symbol="@") + doc = Document(text="no_completion_here", cursor_position=7) + completions = list(completer.get_completions(doc, None)) + assert completions == [] + + +def test_completion_basic(tmp_path, monkeypatch): + setup_files(tmp_path) + cwd = os.getcwd() + os.chdir(tmp_path) + try: + completer = FilePathCompleter(symbol="@") + doc = Document(text="run @fi", cursor_position=7) + completions = list(completer.get_completions(doc, None)) + # Should see file3.txt from the base dir, but NOT .hiddenfile + values = {c.text for c in completions} + assert any("file3.txt" in v for v in values) + assert not any(".hiddenfile" in v for v in values) + finally: + os.chdir(cwd) + + +def 
test_completion_directory_listing(tmp_path): + d = setup_files(tmp_path) + completer = FilePathCompleter(symbol="@") + # Set cwd so dir lookup matches. Fix cursor position off by one. + cwd = os.getcwd() + os.chdir(tmp_path) + try: + text = f"test @{d.name}/" + doc = Document(text=text, cursor_position=len(text)) + completions = list(completer.get_completions(doc, None)) + # In modern prompt_toolkit, display is a FormattedText: a list of (style, text) tuples + filenames = { + c.display[0][1] if hasattr(c.display, "__getitem__") else str(c.display) + for c in completions + } + assert "file1.txt" in filenames + assert "file2.py" in filenames + finally: + os.chdir(cwd) + + +def test_completion_symbol_in_middle(tmp_path): + setup_files(tmp_path) + completer = FilePathCompleter(symbol="@") + cwd = os.getcwd() + os.chdir(tmp_path) + try: + doc = Document(text="echo @fi then something", cursor_position=7) + completions = list(completer.get_completions(doc, None)) + assert any("file3.txt" in c.text for c in completions) + finally: + os.chdir(cwd) + + +def test_completion_with_hidden_file(tmp_path): + # Should show hidden files if user types starting with . + setup_files(tmp_path) + completer = FilePathCompleter(symbol="@") + cwd = os.getcwd() + os.chdir(tmp_path) + try: + doc = Document(text="@.", cursor_position=2) + completions = list(completer.get_completions(doc, None)) + assert any(".hiddenfile" in c.text for c in completions) + finally: + os.chdir(cwd) + + +def test_completion_handles_permissionerror(monkeypatch): + # Patch os.listdir to explode! + completer = FilePathCompleter(symbol="@") + + def explode(path): + raise PermissionError + + monkeypatch.setattr(os, "listdir", explode) + doc = Document(text="@", cursor_position=1) + # Should not raise: + list(completer.get_completions(doc, None)) + + +def test_set_completer_on_non_trigger(): + completer = SetCompleter() + doc = Document(text="not_a_set_command") + assert list(completer.get_completions(doc, None)) == [] + + +def test_set_completer_exact_trigger(monkeypatch): + completer = SetCompleter() + doc = Document(text="/set", cursor_position=len("/set")) + completions = list(completer.get_completions(doc, None)) + assert len(completions) == 1 + assert completions[0].text == "/set " # Check the actual text to be inserted + # display_meta can be FormattedText, so access its content + assert completions[0].display_meta[0][1] == "set config key" + + +def test_set_completer_on_set_trigger(monkeypatch): + # Simulate config keys + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_config_keys", + lambda: ["foo", "bar"], + ) + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_value", + lambda key: "woo" if key == "foo" else None, + ) + completer = SetCompleter() + doc = Document(text="/set ", cursor_position=len("/set ")) + completions = list(completer.get_completions(doc, None)) + completion_texts = sorted([c.text for c in completions]) + completion_metas = sorted( + [c.display_meta for c in completions] + ) # Corrected display_meta access + + # The completer now provides 'key = value' as text, not '/set key = value' + assert completion_texts == sorted(["bar = ", "foo = woo"]) + # Display meta should be empty now + assert len(completion_metas) == 2 + for meta in completion_metas: + assert isinstance(meta, FormattedText) + assert len(meta) == 1 + assert meta[0][1] == "" + + +def test_set_completer_partial_key(monkeypatch): + monkeypatch.setattr( + 
"code_puppy.command_line.prompt_toolkit_completion.get_config_keys", + lambda: ["long_key_name", "other_key", "model"], + ) + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_value", + lambda key: "value_for_" + key if key == "long_key_name" else None, + ) + completer = SetCompleter() + + doc = Document(text="/set long_k", cursor_position=len("/set long_k")) + completions = list(completer.get_completions(doc, None)) + assert len(completions) == 1 + # `text` for partial key completion should be the key itself and its value part + assert completions[0].text == "long_key_name = value_for_long_key_name" + # Display meta should be empty now + assert isinstance(completions[0].display_meta, FormattedText) + assert len(completions[0].display_meta) == 1 + assert completions[0].display_meta[0][1] == "" + + doc = Document(text="/set oth", cursor_position=len("/set oth")) + completions = list(completer.get_completions(doc, None)) + assert len(completions) == 1 + assert completions[0].text == "other_key = " + # Display meta should be empty now + assert isinstance(completions[0].display_meta, FormattedText) + assert len(completions[0].display_meta) == 1 + assert completions[0].display_meta[0][1] == "" + + +def test_set_completer_excludes_model_key(monkeypatch): + # Ensure 'model' is a config key but SetCompleter doesn't offer it + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_config_keys", + lambda: ["api_key", "model", "temperature"], + ) + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_value", + lambda key: "test_value", + ) + completer = SetCompleter() + + # Test with full "model" typed + doc = Document(text="/set model", cursor_position=len("/set model")) + completions = list(completer.get_completions(doc, None)) + assert completions == [], ( + "SetCompleter should not complete for 'model' key directly" + ) + + # Test with partial "mo" that would match "model" + doc = Document(text="/set mo", cursor_position=len("/set mo")) + completions = list(completer.get_completions(doc, None)) + assert completions == [], ( + "SetCompleter should not complete for 'model' key even partially" + ) + + # Ensure other keys are still completed + doc = Document(text="/set api", cursor_position=len("/set api")) + completions = list(completer.get_completions(doc, None)) + assert len(completions) == 1 + assert completions[0].text == "api_key = test_value" + + +def test_set_completer_excludes_puppy_token(monkeypatch): + # Ensure 'puppy_token' is a config key but SetCompleter doesn't offer it + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_config_keys", + lambda: ["puppy_token", "user_name", "temp_dir"], + ) + monkeypatch.setattr( + "code_puppy.command_line.prompt_toolkit_completion.get_value", + lambda key: "sensitive_token_value" if key == "puppy_token" else "normal_value", + ) + completer = SetCompleter() + + # Test with full "puppy_token" typed + doc = Document(text="/set puppy_token", cursor_position=len("/set puppy_token")) + completions = list(completer.get_completions(doc, None)) + assert completions == [], ( + "SetCompleter should not complete for 'puppy_token' key directly" + ) + + # Test with partial "puppy" that would match "puppy_token" + doc = Document(text="/set puppy", cursor_position=len("/set puppy")) + completions = list(completer.get_completions(doc, None)) + assert completions == [], ( + "SetCompleter should not complete for 'puppy_token' key even partially" + ) + + # Ensure other 
keys are still completed + doc = Document(text="/set user", cursor_position=len("/set user")) + completions = list(completer.get_completions(doc, None)) + assert len(completions) == 1 + assert completions[0].text == "user_name = normal_value" + + +def test_set_completer_no_match(monkeypatch): + monkeypatch.setattr("code_puppy.config.get_config_keys", lambda: ["actual_key"]) + completer = SetCompleter() + doc = Document(text="/set non_existent", cursor_position=len("/set non_existent")) + completions = list(completer.get_completions(doc, None)) + assert completions == [] + + +def test_cd_completer_on_non_trigger(): + completer = CDCompleter() + doc = Document(text="something_else") + assert list(completer.get_completions(doc, None)) == [] + + +@pytest.fixture +def setup_cd_test_dirs(tmp_path): + # Current working directory structure + (tmp_path / "dir1").mkdir() + (tmp_path / "dir2_long_name").mkdir() + (tmp_path / "another_dir").mkdir() + (tmp_path / "file_not_dir.txt").write_text("hello") + + # Home directory structure for testing '~' expansion + mock_home_path = tmp_path / "mock_home" / "user" + mock_home_path.mkdir(parents=True, exist_ok=True) + (mock_home_path / "Documents").mkdir() + (mock_home_path / "Downloads").mkdir() + (mock_home_path / "Desktop").mkdir() + return tmp_path, mock_home_path + + +def test_cd_completer_initial_trigger(setup_cd_test_dirs, monkeypatch): + tmp_path, _ = setup_cd_test_dirs + monkeypatch.chdir(tmp_path) + completer = CDCompleter() + doc = Document(text="/cd ", cursor_position=len("/cd ")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + displays = sorted( + [ + "".join(item[1] for item in c.display) + if isinstance(c.display, list) + else str(c.display) + for c in completions + ] + ) + + # mock_home is also created at the root of tmp_path by the fixture + assert texts == sorted(["another_dir/", "dir1/", "dir2_long_name/", "mock_home/"]) + assert displays == sorted( + ["another_dir/", "dir1/", "dir2_long_name/", "mock_home/"] + ) + assert not any("file_not_dir.txt" in t for t in texts) + + +def test_cd_completer_partial_name(setup_cd_test_dirs, monkeypatch): + tmp_path, _ = setup_cd_test_dirs + monkeypatch.chdir(tmp_path) + completer = CDCompleter() + doc = Document(text="/cd di", cursor_position=len("/cd di")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + assert texts == sorted(["dir1/", "dir2_long_name/"]) + assert "another_dir/" not in texts + + +def test_cd_completer_sub_directory(setup_cd_test_dirs, monkeypatch): + tmp_path, _ = setup_cd_test_dirs + # Create a subdirectory with content + sub_dir = tmp_path / "dir1" / "sub1" + sub_dir.mkdir(parents=True) + (tmp_path / "dir1" / "sub2_another").mkdir() + + monkeypatch.chdir(tmp_path) + completer = CDCompleter() + doc = Document(text="/cd dir1/", cursor_position=len("/cd dir1/")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + # Completions should be relative to the 'base' typed in the command, which is 'dir1/' + # So, the 'text' part of completion should be 'dir1/sub1/' and 'dir1/sub2_another/' + assert texts == sorted(["dir1/sub1/", "dir1/sub2_another/"]) + displays = sorted(["".join(item[1] for item in c.display) for c in completions]) + assert displays == sorted(["sub1/", "sub2_another/"]) + + +def test_cd_completer_partial_sub_directory(setup_cd_test_dirs, monkeypatch): + tmp_path, _ = setup_cd_test_dirs + sub_dir = 
tmp_path / "dir1" / "sub_alpha" + sub_dir.mkdir(parents=True) + (tmp_path / "dir1" / "sub_beta").mkdir() + + monkeypatch.chdir(tmp_path) + completer = CDCompleter() + doc = Document(text="/cd dir1/sub_a", cursor_position=len("/cd dir1/sub_a")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + assert texts == ["dir1/sub_alpha/"] + displays = sorted(["".join(item[1] for item in c.display) for c in completions]) + assert displays == ["sub_alpha/"] + + +def test_cd_completer_home_directory_expansion(setup_cd_test_dirs, monkeypatch): + _, mock_home_path = setup_cd_test_dirs + monkeypatch.setattr( + os.path, "expanduser", lambda p: p.replace("~", str(mock_home_path)) + ) + # We don't chdir here, as ~ expansion should work irrespective of cwd + + completer = CDCompleter() + doc = Document(text="/cd ~/", cursor_position=len("/cd ~/")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + displays = sorted(["".join(item[1] for item in c.display) for c in completions]) + + # The 'text' should include the '~/' prefix as that's what the user typed as base + assert texts == sorted(["~/Desktop/", "~/Documents/", "~/Downloads/"]) + assert displays == sorted(["Desktop/", "Documents/", "Downloads/"]) + + +def test_cd_completer_home_directory_expansion_partial(setup_cd_test_dirs, monkeypatch): + _, mock_home_path = setup_cd_test_dirs + monkeypatch.setattr( + os.path, "expanduser", lambda p: p.replace("~", str(mock_home_path)) + ) + + completer = CDCompleter() + doc = Document(text="/cd ~/Do", cursor_position=len("/cd ~/Do")) + completions = list(completer.get_completions(doc, None)) + texts = sorted([c.text for c in completions]) + displays = sorted(["".join(item[1] for item in c.display) for c in completions]) + + assert texts == sorted(["~/Documents/", "~/Downloads/"]) + assert displays == sorted(["Documents/", "Downloads/"]) + assert "~/Desktop/" not in texts + + +def test_cd_completer_non_existent_base(setup_cd_test_dirs, monkeypatch): + tmp_path, _ = setup_cd_test_dirs + monkeypatch.chdir(tmp_path) + completer = CDCompleter() + doc = Document( + text="/cd non_existent_dir/", cursor_position=len("/cd non_existent_dir/") + ) + completions = list(completer.get_completions(doc, None)) + assert completions == [] + + +def test_cd_completer_permission_error_silently_handled(monkeypatch): + completer = CDCompleter() + # Patch the utility function used by CDCompleter + with patch( + "code_puppy.command_line.prompt_toolkit_completion.list_directory", + side_effect=PermissionError, + ) as mock_list_dir: + doc = Document(text="/cd somedir/", cursor_position=len("/cd somedir/")) + completions = list(completer.get_completions(doc, None)) + assert completions == [] + mock_list_dir.assert_called_once() + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +@patch("code_puppy.command_line.prompt_toolkit_completion.FileHistory") +@patch("code_puppy.command_line.prompt_toolkit_completion.update_model_in_input") +@patch("code_puppy.command_line.prompt_toolkit_completion.merge_completers") +async def test_get_input_with_combined_completion_defaults( + mock_merge_completers, mock_update_model, mock_file_history, mock_prompt_session_cls +): + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value="test input") + mock_prompt_session_cls.return_value = mock_session_instance + mock_update_model.return_value = "processed input" 
+ mock_merge_completers.return_value = MagicMock() # Mocked merged completer + + result = await get_input_with_combined_completion() + + mock_prompt_session_cls.assert_called_once() + assert ( + mock_prompt_session_cls.call_args[1]["completer"] + == mock_merge_completers.return_value + ) + assert mock_prompt_session_cls.call_args[1]["history"] is None + assert mock_prompt_session_cls.call_args[1]["complete_while_typing"] is True + assert "key_bindings" in mock_prompt_session_cls.call_args[1] + assert "input_processors" in mock_prompt_session_cls.call_args[1] + assert isinstance( + mock_prompt_session_cls.call_args[1]["input_processors"][0], + AttachmentPlaceholderProcessor, + ) + + mock_session_instance.prompt_async.assert_called_once() + # Check default prompt string was converted to FormattedText + assert isinstance(mock_session_instance.prompt_async.call_args[0][0], FormattedText) + assert mock_session_instance.prompt_async.call_args[0][0] == FormattedText( + [(None, ">>> ")] + ) + assert "style" in mock_session_instance.prompt_async.call_args[1] + + mock_update_model.assert_called_once_with("test input") + assert result == "processed input" + mock_file_history.assert_not_called() + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +@patch("code_puppy.command_line.prompt_toolkit_completion.FileHistory") +@patch("code_puppy.command_line.prompt_toolkit_completion.update_model_in_input") +async def test_get_input_with_combined_completion_with_history( + mock_update_model, mock_file_history, mock_prompt_session_cls +): + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value="input with history") + mock_prompt_session_cls.return_value = mock_session_instance + mock_update_model.return_value = "processed history input" + mock_history_instance = MagicMock() + mock_file_history.return_value = mock_history_instance + + history_path = "~/.my_test_history" + result = await get_input_with_combined_completion(history_file=history_path) + + mock_file_history.assert_called_once_with(history_path) + assert mock_prompt_session_cls.call_args[1]["history"] == mock_history_instance + mock_update_model.assert_called_once_with("input with history") + assert result == "processed history input" + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +@patch("code_puppy.command_line.prompt_toolkit_completion.update_model_in_input") +async def test_get_input_with_combined_completion_custom_prompt( + mock_update_model, mock_prompt_session_cls +): + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value="custom prompt input") + mock_prompt_session_cls.return_value = mock_session_instance + mock_update_model.return_value = "processed custom prompt" + + # Test with string prompt + custom_prompt_str = "Custom> " + await get_input_with_combined_completion(prompt_str=custom_prompt_str) + assert mock_session_instance.prompt_async.call_args[0][0] == FormattedText( + [(None, custom_prompt_str)] + ) + + # Test with FormattedText prompt + custom_prompt_ft = FormattedText([("class:test", "Formatted>")]) + await get_input_with_combined_completion(prompt_str=custom_prompt_ft) + assert mock_session_instance.prompt_async.call_args[0][0] == custom_prompt_ft + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +@patch( + "code_puppy.command_line.prompt_toolkit_completion.update_model_in_input", + 
return_value=None, +) # Simulate no model update +async def test_get_input_with_combined_completion_no_model_update( + mock_update_model_no_change, mock_prompt_session_cls +): + raw_input = "raw user input" + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value=raw_input) + mock_prompt_session_cls.return_value = mock_session_instance + + result = await get_input_with_combined_completion() + mock_update_model_no_change.assert_called_once_with(raw_input) + assert result == raw_input + + +# To test key bindings, we need to inspect the KeyBindings object passed to PromptSession +# We can get it from the mock_prompt_session_cls.call_args + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +async def test_get_input_key_binding_alt_m(mock_prompt_session_cls): + # We don't need the function to run fully, just to set up PromptSession + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value="test") + mock_prompt_session_cls.return_value = mock_session_instance + + await get_input_with_combined_completion() + + bindings = mock_prompt_session_cls.call_args[1]["key_bindings"] + # Find the Alt+M binding (Escape, 'm') + alt_m_handler = None + for binding in bindings.bindings: + if ( + len(binding.keys) == 2 + and binding.keys[0] == Keys.Escape + and binding.keys[1] == "m" + ): + alt_m_handler = binding.handler + break + assert alt_m_handler is not None, "Alt+M keybinding not found" + + mock_event = MagicMock() + mock_event.app.current_buffer = MagicMock() + alt_m_handler(mock_event) + mock_event.app.current_buffer.insert_text.assert_called_once_with("\n") + + +@pytest.mark.asyncio +@patch("code_puppy.command_line.prompt_toolkit_completion.PromptSession") +async def test_get_input_key_binding_escape(mock_prompt_session_cls): + mock_session_instance = MagicMock() + mock_session_instance.prompt_async = AsyncMock(return_value="test") + mock_prompt_session_cls.return_value = mock_session_instance + + await get_input_with_combined_completion() + + bindings = mock_prompt_session_cls.call_args[1]["key_bindings"] + found_escape_handler = None + for binding_obj in bindings.bindings: + if binding_obj.keys == (Keys.Escape,): + found_escape_handler = binding_obj.handler + break + + assert found_escape_handler is not None, "Standalone Escape keybinding not found" + + mock_event = MagicMock() + mock_event.app = MagicMock() + mock_event.app.exit.side_effect = KeyboardInterrupt + with pytest.raises(KeyboardInterrupt): + found_escape_handler(mock_event) + mock_event.app.exit.assert_called_once_with(exception=KeyboardInterrupt) + + +@pytest.mark.asyncio +async def test_attachment_placeholder_processor_renders_images(tmp_path: Path) -> None: + image_path = tmp_path / "fluffy pupper.png" + image_path.write_bytes(b"png") + + processor = AttachmentPlaceholderProcessor() + document_text = f"describe {image_path} now" + document = Document(text=document_text, cursor_position=len(document_text)) + + fragments = [("", document_text)] + buffer = Buffer(document=document) + control = BufferControl(buffer=buffer) + transformation_input = TransformationInput( + buffer_control=control, + document=document, + lineno=0, + source_to_display=lambda i: i, + fragments=fragments, + width=len(document_text), + height=1, + ) + + transformed = processor.apply_transformation(transformation_input) + rendered_text = "".join(text for _style, text in transformed.fragments) + + assert "[png image]" in rendered_text + 
assert "fluffy pupper" not in rendered_text + + +@pytest.mark.asyncio +async def test_attachment_placeholder_processor_handles_links() -> None: + processor = AttachmentPlaceholderProcessor() + document_text = "check https://example.com/pic.png" + document = Document(text=document_text, cursor_position=len(document_text)) + + fragments = [("", document_text)] + buffer = Buffer(document=document) + control = BufferControl(buffer=buffer) + transformation_input = TransformationInput( + buffer_control=control, + document=document, + lineno=0, + source_to_display=lambda i: i, + fragments=fragments, + width=len(document_text), + height=1, + ) + + transformed = processor.apply_transformation(transformation_input) + rendered_text = "".join(text for _style, text in transformed.fragments) + + assert "[link]" in rendered_text + assert "https://example.com/pic.png" not in rendered_text diff --git a/tests/test_round_robin_rotate_every.py b/tests/test_round_robin_rotate_every.py new file mode 100644 index 00000000..33a1c48e --- /dev/null +++ b/tests/test_round_robin_rotate_every.py @@ -0,0 +1,111 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from code_puppy.round_robin_model import RoundRobinModel + + +class MockModel: + """A simple mock model that implements the required interface.""" + + def __init__(self, name, settings=None): + self._name = name + self._settings = settings + self.request = AsyncMock(return_value=f"response_from_{name}") + + @property + def model_name(self): + return self._name + + @property + def settings(self): + return self._settings + + def customize_request_parameters(self, model_request_parameters): + return model_request_parameters + + +@pytest.mark.asyncio +async def test_round_robin_rotate_every_default(): + """Test that round-robin model rotates every request by default.""" + # Create mock models + model1 = MockModel("model1") + model2 = MockModel("model2") + + # Create round-robin model with default rotate_every (1) + rr_model = RoundRobinModel(model1, model2) + + # Verify model name format + assert rr_model.model_name == "round_robin:model1,model2" + + # First request should go to model1 + await rr_model.request([], None, MagicMock()) + model1.request.assert_called_once() + model2.request.assert_not_called() + + # Second request should go to model2 (rotated) + await rr_model.request([], None, MagicMock()) + model1.request.assert_called_once() + model2.request.assert_called_once() + + +@pytest.mark.asyncio +async def test_round_robin_rotate_every_custom(): + """Test that round-robin model rotates every N requests when specified.""" + # Create mock models + model1 = MockModel("model1") + model2 = MockModel("model2") + + # Create round-robin model with rotate_every=3 + rr_model = RoundRobinModel(model1, model2, rotate_every=3) + + # Verify model name format includes rotate_every parameter + assert rr_model.model_name == "round_robin:model1,model2:rotate_every=3" + + # First 3 requests should all go to model1 + for i in range(3): + await rr_model.request([], None, MagicMock()) + + assert model1.request.call_count == 3 + assert model2.request.call_count == 0 + + # Reset mocks to clear call counts + model1.request.reset_mock() + model2.request.reset_mock() + + # Next 3 requests should all go to model2 + for i in range(3): + await rr_model.request([], None, MagicMock()) + + assert model1.request.call_count == 0 + assert model2.request.call_count == 3 + + # Reset mocks again + model1.request.reset_mock() + model2.request.reset_mock() + + # Next request 
should go back to model1 + await rr_model.request([], None, MagicMock()) + + assert model1.request.call_count == 1 + assert model2.request.call_count == 0 + + +def test_round_robin_rotate_every_validation(): + """Test that rotate_every parameter is validated correctly.""" + model1 = MockModel("model1") + model2 = MockModel("model2") + + # Should raise ValueError for rotate_every < 1 + with pytest.raises(ValueError, match="rotate_every must be at least 1"): + RoundRobinModel(model1, model2, rotate_every=0) + + with pytest.raises(ValueError, match="rotate_every must be at least 1"): + RoundRobinModel(model1, model2, rotate_every=-1) + + # Should work fine for rotate_every >= 1 + rr_model = RoundRobinModel(model1, model2, rotate_every=1) + assert rr_model._rotate_every == 1 + + rr_model = RoundRobinModel(model1, model2, rotate_every=5) + assert rr_model._rotate_every == 5 diff --git a/tests/test_session_storage.py b/tests/test_session_storage.py new file mode 100644 index 00000000..339f9dc2 --- /dev/null +++ b/tests/test_session_storage.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +import json +import os +from pathlib import Path +from typing import Callable, List + +import pytest + +from code_puppy.session_storage import ( + cleanup_sessions, + list_sessions, + load_session, + save_session, +) + + +@pytest.fixture() +def history() -> List[str]: + return ["one", "two", "three"] + + +@pytest.fixture() +def token_estimator() -> Callable[[object], int]: + return lambda message: len(str(message)) + + +def test_save_and_load_session(tmp_path: Path, history: List[str], token_estimator): + session_name = "demo_session" + timestamp = "2024-01-01T00:00:00" + metadata = save_session( + history=history, + session_name=session_name, + base_dir=tmp_path, + timestamp=timestamp, + token_estimator=token_estimator, + ) + + assert metadata.session_name == session_name + assert metadata.message_count == len(history) + assert metadata.total_tokens == sum(token_estimator(m) for m in history) + assert metadata.pickle_path.exists() + assert metadata.metadata_path.exists() + + with metadata.metadata_path.open() as meta_file: + stored = json.load(meta_file) + assert stored["session_name"] == session_name + assert stored["auto_saved"] is False + + loaded_history = load_session(session_name, tmp_path) + assert loaded_history == history + + +def test_list_sessions(tmp_path: Path, history: List[str], token_estimator): + names = ["beta", "alpha", "gamma"] + for name in names: + save_session( + history=history, + session_name=name, + base_dir=tmp_path, + timestamp="2024-01-01T00:00:00", + token_estimator=token_estimator, + ) + + assert list_sessions(tmp_path) == sorted(names) + + +def test_cleanup_sessions(tmp_path: Path, history: List[str], token_estimator): + session_names = ["session_earliest", "session_middle", "session_latest"] + for index, name in enumerate(session_names): + metadata = save_session( + history=history, + session_name=name, + base_dir=tmp_path, + timestamp="2024-01-01T00:00:00", + token_estimator=token_estimator, + ) + os.utime(metadata.pickle_path, (0, index)) + + removed = cleanup_sessions(tmp_path, 2) + assert removed == ["session_earliest"] + remaining = list_sessions(tmp_path) + assert sorted(remaining) == sorted(["session_middle", "session_latest"]) diff --git a/tests/test_tools_registration.py b/tests/test_tools_registration.py new file mode 100644 index 00000000..a0541b49 --- /dev/null +++ b/tests/test_tools_registration.py @@ -0,0 +1,105 @@ +"""Tests for the tool registration 
system.""" + +from unittest.mock import MagicMock + +from code_puppy.tools import ( + TOOL_REGISTRY, + get_available_tool_names, + register_all_tools, + register_tools_for_agent, +) + + +class TestToolRegistration: + """Test tool registration functionality.""" + + def test_tool_registry_structure(self): + """Test that the tool registry has the expected structure.""" + expected_tools = [ + "list_files", + "read_file", + "grep", + "edit_file", + "delete_file", + "agent_run_shell_command", + "agent_share_your_reasoning", + "list_agents", + "invoke_agent", + ] + + assert isinstance(TOOL_REGISTRY, dict) + + # Check all expected tools are present + for tool in expected_tools: + assert tool in TOOL_REGISTRY, f"Tool {tool} missing from registry" + + # Check structure of registry entries + for tool_name, reg_func in TOOL_REGISTRY.items(): + assert callable(reg_func), ( + f"Registration function for {tool_name} is not callable" + ) + + def test_get_available_tool_names(self): + """Test that get_available_tool_names returns the correct tools.""" + tools = get_available_tool_names() + + assert isinstance(tools, list) + assert len(tools) == len(TOOL_REGISTRY) + + for tool in tools: + assert tool in TOOL_REGISTRY + + def test_register_tools_for_agent(self): + """Test registering specific tools for an agent.""" + mock_agent = MagicMock() + + # Test registering file operations tools + register_tools_for_agent(mock_agent, ["list_files", "read_file"]) + + # The mock agent should have had registration functions called + # (We can't easily test the exact behavior since it depends on decorators) + # But we can test that no exceptions were raised + assert True # If we get here, no exception was raised + + def test_register_tools_invalid_tool(self): + """Test that registering an invalid tool prints warning and continues.""" + mock_agent = MagicMock() + + # This should not raise an error, just print a warning and continue + register_tools_for_agent(mock_agent, ["invalid_tool"]) + + # Verify agent was not called for the invalid tool + assert mock_agent.call_count == 0 or not any( + "invalid_tool" in str(call) for call in mock_agent.call_args_list + ) + + def test_register_all_tools(self): + """Test registering all available tools.""" + mock_agent = MagicMock() + + # This should register all tools without error + register_all_tools(mock_agent) + + # Test passed if no exception was raised + assert True + + def test_register_tools_by_category(self): + """Test that tools from different categories can be registered.""" + mock_agent = MagicMock() + + # Test file operations + register_tools_for_agent(mock_agent, ["list_files"]) + + # Test file modifications + register_tools_for_agent(mock_agent, ["edit_file"]) + + # Test command runner + register_tools_for_agent(mock_agent, ["agent_run_shell_command"]) + + # Test mixed categories + register_tools_for_agent( + mock_agent, ["read_file", "delete_file", "agent_share_your_reasoning"] + ) + + # Test passed if no exception was raised + assert True diff --git a/tests/test_tui_rich_object_rendering.py b/tests/test_tui_rich_object_rendering.py new file mode 100644 index 00000000..263b308d --- /dev/null +++ b/tests/test_tui_rich_object_rendering.py @@ -0,0 +1,370 @@ +#!/usr/bin/env python3 +""" +Test that TUI renderer properly converts Rich objects to text instead of showing object references. 
+""" + +import asyncio + +from rich.markdown import Markdown +from rich.syntax import Syntax +from rich.table import Table + +from code_puppy.messaging import MessageType, UIMessage +from code_puppy.messaging.message_queue import MessageQueue +from code_puppy.messaging.renderers import TUIRenderer + + +class MockTUIApp: + """Mock TUI app to capture messages.""" + + def __init__(self): + self.system_messages = [] + self.agent_messages = [] + self.agent_reasoning_messages = [] + self.error_messages = [] + + def add_system_message(self, content, message_group=None, group_id=None): + self.system_messages.append(content) + + def add_agent_message(self, content, message_group=None): + self.agent_messages.append(content) + + def add_agent_reasoning_message(self, content, message_group=None): + self.agent_reasoning_messages.append(content) + + def add_error_message(self, content, message_group=None): + self.error_messages.append(content) + + def add_planned_next_steps_message(self, content, message_group=None): + self.agent_reasoning_messages.append(content) # Can reuse for simplicity + + +def test_tui_renderer_rich_table(): + """Test that Rich Table objects are properly rendered to text.""" + queue = MessageQueue() + mock_app = MockTUIApp() + renderer = TUIRenderer(queue, mock_app) + + # Create a Rich Table + table = Table(title="Test Table") + table.add_column("File", style="cyan") + table.add_column("Size", style="green") + table.add_row("test.py", "1.2 KB") + table.add_row("main.py", "5.4 KB") + + message = UIMessage(MessageType.TOOL_OUTPUT, table) + asyncio.run(renderer.render_message(message)) + + # Check that the message was rendered properly + assert len(mock_app.agent_messages) == 1 + rendered_content = mock_app.agent_messages[0] + + # Should not contain object reference + assert "object at 0x" not in rendered_content + assert "rich.table.Table" not in rendered_content + + # Should contain table content + assert "Test Table" in rendered_content + assert "File" in rendered_content + assert "Size" in rendered_content + assert "test.py" in rendered_content + assert "main.py" in rendered_content + + # Should contain table border characters + assert "┏" in rendered_content or "┌" in rendered_content + + +def test_tui_renderer_rich_syntax(): + """Test that Rich Syntax objects are properly rendered to text.""" + queue = MessageQueue() + mock_app = MockTUIApp() + renderer = TUIRenderer(queue, mock_app) + + # Create a Rich Syntax object + code = '''def hello_world(): + print("Hello, World!") + return "success"''' + syntax = Syntax(code, "python", theme="monokai", line_numbers=True) + + message = UIMessage(MessageType.AGENT_REASONING, syntax) + asyncio.run(renderer.render_message(message)) + + # Check that the message was rendered properly + assert len(mock_app.agent_reasoning_messages) == 1 + rendered_content = mock_app.agent_reasoning_messages[0] + + # Should not contain object reference + assert "object at 0x" not in rendered_content + assert "rich.syntax.Syntax" not in rendered_content + + # Should contain code content + assert "def hello_world()" in rendered_content + assert 'print("Hello, World!")' in rendered_content + assert 'return "success"' in rendered_content + + +def test_tui_renderer_rich_markdown(): + """Test that Rich Markdown objects are properly rendered to text.""" + queue = MessageQueue() + mock_app = MockTUIApp() + renderer = TUIRenderer(queue, mock_app) + + # Create a Rich Markdown object + markdown_text = """ +# Agent Reasoning + +I need to: + +1. 
**Analyze** the code structure +2. *Identify* potential issues +3. `Implement` the solution + +```python +print("This is a code block") +``` +""" + markdown = Markdown(markdown_text) + + message = UIMessage(MessageType.SYSTEM, markdown) + asyncio.run(renderer.render_message(message)) + + # Check that the message was rendered properly + assert len(mock_app.system_messages) == 1 + rendered_content = mock_app.system_messages[0] + + # Should not contain object reference + assert "object at 0x" not in rendered_content + assert "rich.markdown.Markdown" not in rendered_content + + # Should contain markdown content + assert "Agent Reasoning" in rendered_content + assert "Analyze" in rendered_content + assert "Identify" in rendered_content + assert "Implement" in rendered_content + assert 'print("This is a code block")' in rendered_content + + +def test_tui_renderer_plain_string(): + """Test that plain strings are still handled correctly.""" + queue = MessageQueue() + mock_app = MockTUIApp() + renderer = TUIRenderer(queue, mock_app) + + message = UIMessage(MessageType.INFO, "This is a plain string message") + asyncio.run(renderer.render_message(message)) + + # Check that the message was rendered properly + assert len(mock_app.system_messages) == 1 + assert mock_app.system_messages[0] == "This is a plain string message" + + +def test_queue_console_rich_markdown(): + """Test that QueueConsole properly handles Rich Markdown objects.""" + from code_puppy.messaging.message_queue import MessageQueue + from code_puppy.messaging.queue_console import QueueConsole + + queue = MessageQueue() + # Mark renderer as active so messages go to main queue instead of startup buffer + queue.mark_renderer_active() + console = QueueConsole(queue) + + # Create a Rich Markdown object (simulating what happens in agent reasoning) + reasoning_text = """ +# Agent Analysis + +I need to: + +1. **Analyze** the problem +2. *Implement* a solution +3. 
`Test` the fix + +```python +print("This is code") +``` +""" + markdown = Markdown(reasoning_text) + + # Print the markdown object (this is what command_runner.py does) + console.print(markdown) + + # Get the message from the queue + message = queue.get_nowait() + + # Verify the message was processed correctly + assert message is not None + assert ( + message.type.value == "agent_reasoning" + ) # Should be inferred as agent reasoning + + # The content should be the Rich Markdown object itself, not a string representation + assert isinstance(message.content, Markdown) + + # Verify it can be rendered properly by TUIRenderer + mock_app = MockTUIApp() + renderer = TUIRenderer(queue, mock_app) + + # Render the message + asyncio.run(renderer.render_message(message)) + + # Check that it was rendered as text, not object reference + assert len(mock_app.agent_reasoning_messages) == 1 + rendered_content = mock_app.agent_reasoning_messages[0] + + # Should not contain object reference + assert "object at 0x" not in rendered_content + assert "rich.markdown.Markdown" not in rendered_content + + # Should contain the actual markdown content + assert "Agent Analysis" in rendered_content + assert "Analyze" in rendered_content + assert "Implement" in rendered_content + assert "Test" in rendered_content + assert 'print("This is code")' in rendered_content + + +def test_queue_console_mixed_content(): + """Test that QueueConsole properly handles mixed Rich and string content.""" + from code_puppy.messaging.message_queue import MessageQueue + from code_puppy.messaging.queue_console import QueueConsole + + queue = MessageQueue() + # Mark renderer as active so messages go to main queue instead of startup buffer + queue.mark_renderer_active() + console = QueueConsole(queue) + + # Create a Rich Markdown object + markdown = Markdown("**Bold text**") + + # Print mixed content + console.print("Prefix: ", markdown, " :suffix") + + # Get the message from the queue + message = queue.get_nowait() + + # Should be processed as string content (not Rich object) + assert isinstance(message.content, str) + assert "object at 0x" not in message.content + assert "Prefix:" in message.content + assert "Bold text" in message.content + assert ":suffix" in message.content + + +def test_system_message_grouping(): + """Test that system messages with the same group_id get concatenated.""" + from datetime import datetime, timezone + + from code_puppy.tui.models.chat_message import ChatMessage + from code_puppy.tui.models.enums import MessageType + + # Mock ChatView to test logic without widget mounting + class MockChatView: + def __init__(self): + self.messages = [] + + def add_message(self, message): + # Simplified version of the grouping logic from chat_view.py + if ( + message.type == MessageType.SYSTEM + and message.group_id is not None + and self.messages + and self.messages[-1].type == MessageType.SYSTEM + and self.messages[-1].group_id == message.group_id + ): + # Concatenate with the previous system message + previous_message = self.messages[-1] + previous_message.content += "\n" + message.content + return + + # Add to messages list + self.messages.append(message) + + # Create a MockChatView instance + chat_view = MockChatView() + + # Add first system message with group_id + msg1 = ChatMessage( + id="test1", + type=MessageType.SYSTEM, + content="First message in group", + timestamp=datetime.now(timezone.utc), + group_id="test_group_123", + ) + chat_view.add_message(msg1) + + # Add second system message with same group_id + msg2 = 
ChatMessage( + id="test2", + type=MessageType.SYSTEM, + content="Second message in group", + timestamp=datetime.now(timezone.utc), + group_id="test_group_123", + ) + chat_view.add_message(msg2) + + # Add third system message with different group_id + msg3 = ChatMessage( + id="test3", + type=MessageType.SYSTEM, + content="Different group message", + timestamp=datetime.now(timezone.utc), + group_id="test_group_456", + ) + chat_view.add_message(msg3) + + # Check that only 2 messages are stored (first and third) + assert len(chat_view.messages) == 2 + + # Check that the first message content has been concatenated + assert ( + chat_view.messages[0].content + == "First message in group\nSecond message in group" + ) + assert chat_view.messages[0].group_id == "test_group_123" + + # Check that the second stored message is the different group + assert chat_view.messages[1].content == "Different group message" + assert chat_view.messages[1].group_id == "test_group_456" + + +def test_tools_generate_group_ids(): + """Test that our tools generate group_ids when emitting messages.""" + import time + + from code_puppy.tools.common import generate_group_id + + # Test group_id generation + group_id1 = generate_group_id("list_files", "/test/path") + time.sleep(0.001) # Small delay to ensure different timestamp + group_id2 = generate_group_id("list_files", "/test/path") + group_id3 = generate_group_id("edit_file", "/test/file.py") + + # Group IDs should be unique when called at different times + assert group_id1 != group_id2 + + # But should start with tool name + assert group_id1.startswith("list_files_") + assert group_id2.startswith("list_files_") + assert group_id3.startswith("edit_file_") + + # Should have consistent format + assert "_" in group_id1 + assert len(group_id1.split("_")) >= 2 + + # Same tool with same context can have same ID if called at same time + group_id4 = generate_group_id("test_tool", "same_context") + group_id5 = generate_group_id("test_tool", "same_context") + # This might be the same or different depending on timing, both are valid + assert group_id4.startswith("test_tool_") + assert group_id5.startswith("test_tool_") + + +if __name__ == "__main__": + test_tui_renderer_rich_table() + test_tui_renderer_rich_syntax() + test_tui_renderer_rich_markdown() + test_tui_renderer_plain_string() + test_queue_console_rich_markdown() + test_queue_console_mixed_content() + test_system_message_grouping() + test_tools_generate_group_ids() + print("✅ All tests passed!") diff --git a/tests/test_version_checker.py b/tests/test_version_checker.py new file mode 100644 index 00000000..38abe170 --- /dev/null +++ b/tests/test_version_checker.py @@ -0,0 +1,36 @@ +from code_puppy.version_checker import normalize_version, versions_are_equal + + +def test_normalize_version(): + """Test version string normalization.""" + assert normalize_version("v1.2.3") == "1.2.3" + assert normalize_version("1.2.3") == "1.2.3" + assert normalize_version("v0.0.78") == "0.0.78" + assert normalize_version("0.0.78") == "0.0.78" + assert normalize_version("") == "" + assert normalize_version(None) is None + assert normalize_version("vvv1.2.3") == "1.2.3" # Multiple v's + + +def test_versions_are_equal(): + """Test version equality comparison.""" + # Same versions with and without v prefix + assert versions_are_equal("1.2.3", "v1.2.3") is True + assert versions_are_equal("v1.2.3", "1.2.3") is True + assert versions_are_equal("v1.2.3", "v1.2.3") is True + assert versions_are_equal("1.2.3", "1.2.3") is True + + # The specific case 
from our API + assert versions_are_equal("0.0.78", "v0.0.78") is True + assert versions_are_equal("v0.0.78", "0.0.78") is True + + # Different versions + assert versions_are_equal("1.2.3", "1.2.4") is False + assert versions_are_equal("v1.2.3", "v1.2.4") is False + assert versions_are_equal("1.2.3", "v1.2.4") is False + + # Edge cases + assert versions_are_equal("", "") is True + assert versions_are_equal(None, None) is True + assert versions_are_equal("1.2.3", "") is False + assert versions_are_equal("", "1.2.3") is False diff --git a/tests/test_web_search.py b/tests/test_web_search.py deleted file mode 100644 index eb1e7bd8..00000000 --- a/tests/test_web_search.py +++ /dev/null @@ -1,78 +0,0 @@ -import requests -from unittest.mock import patch -from code_puppy.tools.web_search import web_search - - -def test_web_search_success(): - query = "python testing" - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.status_code = 200 - mock_response.text = '

<div class="result"><h3>Test Title</h3><a href="http://example.com">Link</a></div>
' - results = web_search(None, query) - - assert len(results) == 1 - assert results[0]["title"] == "Test Title" - assert results[0]["url"] == "http://example.com" - - -def test_web_search_http_error(): - query = "python testing" - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.raise_for_status.side_effect = requests.HTTPError - try: - web_search(None, query) - except requests.HTTPError: - assert True - - -def test_web_search_no_results(): - query = "something_not_found" - html = "" # No result divs - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.status_code = 200 - mock_response.text = html - results = web_search(None, query) - assert results == [] - - -def test_web_search_broken_html(): - query = "broken html" - html = '
' # div with missing h3 and a - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.status_code = 200 - mock_response.text = html - results = web_search(None, query) - assert results == [] - - -def test_web_search_num_results_limit(): - query = "multiple results" - html = "".join( - [ - f'

<div class="result"><h3>Title {i}</h3><a href="http://example.com/{i}">Link</a></div>
' - for i in range(10) - ] - ) - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.status_code = 200 - mock_response.text = html - results = web_search(None, query, num_results=3) - assert len(results) == 3 - assert results[0]["title"] == "Title 0" - assert results[1]["url"] == "http://example.com/1" - - -def test_web_search_empty_soup(): - query = "empty soup" - html = " " - with patch("requests.get") as mock_get: - mock_response = mock_get.return_value - mock_response.status_code = 200 - mock_response.text = html - results = web_search(None, query) - assert results == [] diff --git a/uv.lock b/uv.lock index 8991e107..94b54591 100644 --- a/uv.lock +++ b/uv.lock @@ -1,14 +1,120 @@ version = 1 -revision = 2 -requires-python = ">=3.10" +revision = 3 +requires-python = ">=3.11" [[package]] -name = "aiolimiter" -version = "1.2.1" +name = "ag-ui-protocol" +version = "0.1.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/23/b52debf471f7a1e42e362d959a3982bdcb4fe13a5d46e63d28868807a79c/aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9", size = 7185, upload-time = "2024-12-08T15:31:51.496Z" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7b/d7/a8f8789b3b8b5f7263a902361468e8dfefd85ec63d1d5398579b9175d76d/ag_ui_protocol-0.1.9.tar.gz", hash = "sha256:94d75e3919ff75e0b608a7eed445062ea0e6f11cd33b3386a7649047e0c7abd3", size = 4988, upload-time = "2025-09-19T13:36:26.903Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/50/2bb71a2a9135f4d88706293773320d185789b592987c09f79e9bf2f4875f/ag_ui_protocol-0.1.9-py3-none-any.whl", hash = "sha256:44c1238b0576a3915b3a16e1b3855724e08e92ebc96b1ff29379fbd3bfbd400b", size = 7070, upload-time = "2025-09-19T13:36:25.791Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = 
"2025-07-29T05:50:15.937Z" }, + { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = "https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, + { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = 
"sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, +] + +[[package]] +name = "aiohttp-jinja2" +version = "1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "jinja2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/39/da5a94dd89b1af7241fb7fc99ae4e73505b5f898b540b6aba6dc7afe600e/aiohttp-jinja2-1.6.tar.gz", hash = "sha256:a3a7ff5264e5bca52e8ae547bbfd0761b72495230d438d05b6c0915be619b0e2", size = 53057, upload-time = "2023-11-18T15:30:52.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/ba/df6e8e1045aebc4778d19b8a3a9bc1808adb1619ba94ca354d9ba17d86c3/aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7", size = 6711, upload-time = "2024-12-08T15:31:49.874Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/65238d4246307195411b87a07d03539049819b022c01bcc773826f600138/aiohttp_jinja2-1.6-py3-none-any.whl", hash = "sha256:0df405ee6ad1b58e5a068a105407dc7dcc1704544c559f1938babde954f945c7", size = 11736, upload-time = "2023-11-18T15:30:50.743Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] [[package]] @@ -22,35 +128,35 @@ wheels = [ [[package]] name = "anthropic" -version = "0.52.0" +version = "0.68.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "distro" }, + { name = "docstring-parser" }, { name = "httpx" }, { name = "jiter" }, { name = "pydantic" }, { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/fd/8a9332f5baf352c272494a9d359863a53385a208954c1a7251a524071930/anthropic-0.52.0.tar.gz", hash = "sha256:f06bc924d7eb85f8a43fe587b875ff58b410d60251b7dc5f1387b322a35bd67b", size = 229372, upload-time = "2025-05-22T16:42:22.044Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/46/da44bf087ddaf3f7dbe4808c00c7cde466fe68c4fc9fbebdfc231f4ea205/anthropic-0.68.0.tar.gz", hash = "sha256:507e9b5f627d1b249128ff15b21855e718fa4ed8dabc787d0e68860a4b32a7a8", size = 471584, upload-time = "2025-09-17T15:20:19.509Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/43/172c0031654908bbac2a87d356fff4de1b4947a9b14b9658540b69416417/anthropic-0.52.0-py3-none-any.whl", hash = "sha256:c026daa164f0e3bde36ce9cbdd27f5f1419fff03306be1e138726f42e6a7810f", size = 286076, upload-time = "2025-05-22T16:42:20Z" }, + { url = "https://files.pythonhosted.org/packages/60/32/2d7553184b05bdbec61dd600014a55b9028408aee6128b25cb6f20e3002c/anthropic-0.68.0-py3-none-any.whl", hash = "sha256:ac579ea5eca22a7165b1042e6af57c4bf556e51afae3ca80e24768d4756b78c0", size = 325199, upload-time = "2025-09-17T15:20:17.452Z" }, ] [[package]] name = "anyio" -version = "4.9.0" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = 
"sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -62,45 +168,66 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/da/e42d7a9d8dd33fa775f467e4028a47936da2f01e4b0e561f9ba0d74cb0ca/argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591", size = 43708, upload-time = "2025-04-03T04:57:01.591Z" }, ] +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + [[package]] name = "beautifulsoup4" -version = "4.13.4" +version = "4.13.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } +sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954, upload-time = "2025-08-24T14:06:13.168Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, + { url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113, upload-time = "2025-08-24T14:06:14.884Z" }, ] [[package]] name = "boto3" -version = "1.38.23" +version = "1.40.38" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/73/3f67417985007b385adab61dd9d251cf82d409ce5397ec7d067274b09492/boto3-1.38.23.tar.gz", hash = "sha256:bcf73aca469add09e165b8793be18e7578db8d2604d82505ab13dc2495bad982", size = 111806, upload-time = "2025-05-23T19:25:26.212Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/c7/1442380ad7e211089a3c94b758ffb01079eab0183700fba9d5be417b5cb4/boto3-1.40.38.tar.gz", hash = "sha256:932ebdd8dbf8ab5694d233df86d5d0950291e0b146c27cb46da8adb4f00f6ca4", size = 111559, upload-time = "2025-09-24T19:23:25.7Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/f5/9114596c6a4f5e4dade83fbdd271b9572367abdce73b9c7d27142e9e66c3/boto3-1.38.23-py3-none-any.whl", hash = 
"sha256:70ab8364f1f6f0a7e0eaf97f62fbdacf9c1e4cc1de330faf1c146ef9ab01e7d0", size = 139938, upload-time = "2025-05-23T19:25:24.158Z" }, + { url = "https://files.pythonhosted.org/packages/06/a9/e7e5fe3fec60fb87bc9f8b3874c4c606e290a64b2ae8c157e08c3e69d755/boto3-1.40.38-py3-none-any.whl", hash = "sha256:fac337b4f0615e4d6ceee44686e662f51d8e57916ed2bc763468e3e8c611a658", size = 139345, upload-time = "2025-09-24T19:23:23.756Z" }, ] [[package]] name = "botocore" -version = "1.38.23" +version = "1.40.38" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/d5/134a28a30cb1b0c9aa08ceb5d1a3e7afe956f7fa7abad2adda7c5c01d6a1/botocore-1.38.23.tar.gz", hash = "sha256:29685c91050a870c3809238dc5da1ac65a48a3a20b4bca46b6057dcb6b39c72a", size = 13908529, upload-time = "2025-05-23T19:25:15.199Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/11/82a216e24f1af1ba5c3c358201fb9eba5e502242f504dd1f42eb18cbf2c5/botocore-1.40.38.tar.gz", hash = "sha256:18039009e1eca2bff12e576e8dd3c80cd9b312294f1469c831de03169582ad59", size = 14354395, upload-time = "2025-09-24T19:23:14.522Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/dd/e047894efa3a39509f8fcc103dd096999aa52907c969d195af6b0d8e282f/botocore-1.38.23-py3-none-any.whl", hash = "sha256:a7f818672f10d7a080c2c4558428011c3e0abc1039a047d27ac76ec846158457", size = 13567446, upload-time = "2025-05-23T19:25:10.795Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f0/ca5a00dd8fe3768ecff54756457dd0c69ed8e1cd09d0f7c21599477b5d5b/botocore-1.40.38-py3-none-any.whl", hash = "sha256:7d60a7557db3a58f9394e7ecec1f6b87495ce947eb713f29d53aee83a6e9dc71", size = 14025193, upload-time = "2025-09-24T19:23:11.093Z" }, +] + +[[package]] +name = "browserforge" +version = "1.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/5c/fe4d8cc5d5e61a5b1585190bba19d25bb76c45fdfe9c7bf264f5301fcf33/browserforge-1.2.3.tar.gz", hash = "sha256:d5bec6dffd4748b30fbac9f9c1ef33b26c01a23185240bf90011843e174b7ecc", size = 38072, upload-time = "2025-01-29T09:45:48.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/53/c60eb5bd26cf8689e361031bebc431437bc988555e80ba52d48c12c1d866/browserforge-1.2.3-py3-none-any.whl", hash = "sha256:a6c71ed4688b2f1b0bee757ca82ddad0007cbba68a71eca66ca607dde382f132", size = 39626, upload-time = "2025-01-29T09:45:47.531Z" }, ] [[package]] @@ -124,124 +251,170 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/72/76/20fa66124dbe6be5cafeb312ece67de6b61dd91a0247d1ea13db4ebb33c2/cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a", size = 10080, upload-time = "2025-02-20T21:01:16.647Z" }, ] +[[package]] +name = "camoufox" +version = "0.4.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "browserforge" }, + { name = "click" }, + { name = "language-tags" }, + { name = "lxml" }, + { name = "numpy" }, + { name = "orjson" }, + { name = "platformdirs" }, + { name = "playwright" }, + { name = "pysocks" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "screeninfo" }, + { name = "tqdm" }, + { name = "typing-extensions" }, + { name = "ua-parser" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/d3/15/e0a1b586e354ea6b8d6612717bf4372aaaa6753444d5d006caf0bb116466/camoufox-0.4.11.tar.gz", hash = "sha256:0a2c9d24ac5070c104e7c2b125c0a3937f70efa416084ef88afe94c32a72eebe", size = 64409, upload-time = "2025-01-29T09:33:20.019Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/7b/a2f099a5afb9660271b3f20f6056ba679e7ab4eba42682266a65d5730f7e/camoufox-0.4.11-py3-none-any.whl", hash = "sha256:83864d434d159a7566990aa6524429a8d1a859cbf84d2f64ef4a9f29e7d2e5ff", size = 71628, upload-time = "2025-01-29T09:33:18.558Z" }, +] + [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = 
"sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] name = "code-puppy" -version = "0.0.12" +version = "0.0.204" source = { editable = "." 
} dependencies = [ { name = "bs4" }, + { name = "camoufox" }, + { name = "fastapi" }, { name = "httpx" }, { name = "httpx-limiter" }, + { name = "json-repair" }, { name = "logfire" }, + { name = "openai" }, + { name = "pathspec" }, + { name = "playwright" }, { name = "prompt-toolkit" }, { name = "pydantic" }, { name = "pydantic-ai" }, + { name = "pyjwt" }, { name = "pytest-cov" }, { name = "python-dotenv" }, + { name = "rapidfuzz" }, { name = "rich" }, + { name = "ripgrep" }, { name = "ruff" }, + { name = "tenacity" }, + { name = "termcolor" }, + { name = "textual" }, + { name = "textual-dev" }, + { name = "uvicorn" }, ] [package.metadata] requires-dist = [ { name = "bs4", specifier = ">=0.0.2" }, + { name = "camoufox", specifier = ">=0.4.11" }, + { name = "fastapi", specifier = ">=0.110.0" }, { name = "httpx", specifier = ">=0.24.1" }, { name = "httpx-limiter", specifier = ">=0.3.0" }, + { name = "json-repair", specifier = ">=0.46.2" }, { name = "logfire", specifier = ">=0.7.1" }, - { name = "prompt-toolkit", specifier = ">=3.0.38" }, + { name = "openai", specifier = ">=1.99.1" }, + { name = "pathspec", specifier = ">=0.11.0" }, + { name = "playwright", specifier = ">=1.40.0" }, + { name = "prompt-toolkit", specifier = ">=3.0.52" }, { name = "pydantic", specifier = ">=2.4.0" }, - { name = "pydantic-ai", specifier = ">=0.1.0" }, + { name = "pydantic-ai", specifier = "==1.0.5" }, + { name = "pyjwt", specifier = ">=2.8.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "rapidfuzz", specifier = ">=3.13.0" }, { name = "rich", specifier = ">=13.4.2" }, + { name = "ripgrep", specifier = ">=14.1.0" }, { name = "ruff", specifier = ">=0.11.11" }, + { name = "tenacity", specifier = ">=8.2.0" }, + { name = "termcolor", specifier = ">=3.1.0" }, + { name = "textual", specifier = ">=5.0.0" }, + { name = "textual-dev", specifier = ">=1.7.0" }, + { name = "uvicorn", specifier = ">=0.29.0" }, ] [[package]] name = "cohere" -version = "5.15.0" +version = "5.18.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastavro" }, @@ -254,9 +427,9 @@ dependencies = [ { name = "types-requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/33/69c7d1b25a20eafef4197a1444c7f87d5241e936194e54876ea8996157e6/cohere-5.15.0.tar.gz", hash = "sha256:e802d4718ddb0bb655654382ebbce002756a3800faac30296cde7f1bdc6ff2cc", size = 135021, upload-time = "2025-04-15T13:39:51.404Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/f5/4682a965449826044c853c82796805f8d3e9214471e2f120db3063116584/cohere-5.18.0.tar.gz", hash = "sha256:93a7753458a45cd30c796300182d22bb1889eadc510727e1de3d8342cb2bc0bf", size = 164340, upload-time = "2025-09-12T14:17:16.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/87/94694db7fe6df979fbc03286eaabdfa98f1c8fa532960e5afdf965e10960/cohere-5.15.0-py3-none-any.whl", hash = "sha256:22ff867c2a6f2fc2b585360c6072f584f11f275ef6d9242bac24e0fa2df1dfb5", size = 259522, upload-time = "2025-04-15T13:39:49.498Z" }, + { url = "https://files.pythonhosted.org/packages/23/9b/3dc80542e60c711d57777b836a64345dda28f826c14fd64d9123278fcbfe/cohere-5.18.0-py3-none-any.whl", hash = "sha256:885e7be360206418db39425faa60dbcd7f38e39e7f84b824ee68442e6a436e93", size = 295384, upload-time = "2025-09-12T14:17:15.421Z" }, ] [[package]] @@ -270,66 +443,89 @@ wheels = [ [[package]] name = "coverage" -version = "7.8.2" -source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/6b/7dd06399a5c0b81007e3a6af0395cd60e6a30f959f8d407d3ee04642e896/coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a", size = 211573, upload-time = "2025-05-23T11:37:47.207Z" }, - { url = "https://files.pythonhosted.org/packages/f0/df/2b24090820a0bac1412955fb1a4dade6bc3b8dcef7b899c277ffaf16916d/coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be", size = 212006, upload-time = "2025-05-23T11:37:50.289Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c4/e4e3b998e116625562a872a342419652fa6ca73f464d9faf9f52f1aff427/coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3", size = 241128, upload-time = "2025-05-23T11:37:52.229Z" }, - { url = "https://files.pythonhosted.org/packages/b1/67/b28904afea3e87a895da850ba587439a61699bf4b73d04d0dfd99bbd33b4/coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6", size = 239026, upload-time = "2025-05-23T11:37:53.846Z" }, - { url = "https://files.pythonhosted.org/packages/8c/0f/47bf7c5630d81bc2cd52b9e13043685dbb7c79372a7f5857279cc442b37c/coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622", size = 240172, upload-time = "2025-05-23T11:37:55.711Z" }, - { url = "https://files.pythonhosted.org/packages/ba/38/af3eb9d36d85abc881f5aaecf8209383dbe0fa4cac2d804c55d05c51cb04/coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c", size = 240086, upload-time = "2025-05-23T11:37:57.724Z" }, - { url = "https://files.pythonhosted.org/packages/9e/64/c40c27c2573adeba0fe16faf39a8aa57368a1f2148865d6bb24c67eadb41/coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3", size = 238792, upload-time = "2025-05-23T11:37:59.737Z" }, - { url = "https://files.pythonhosted.org/packages/8e/ab/b7c85146f15457671c1412afca7c25a5696d7625e7158002aa017e2d7e3c/coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404", size = 239096, upload-time = "2025-05-23T11:38:01.693Z" }, - { url = "https://files.pythonhosted.org/packages/d3/50/9446dad1310905fb1dc284d60d4320a5b25d4e3e33f9ea08b8d36e244e23/coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7", size = 214144, upload-time = "2025-05-23T11:38:03.68Z" }, - { url = "https://files.pythonhosted.org/packages/23/ed/792e66ad7b8b0df757db8d47af0c23659cdb5a65ef7ace8b111cacdbee89/coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347", size = 215043, upload-time = "2025-05-23T11:38:05.217Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/4d/1ff618ee9f134d0de5cc1661582c21a65e06823f41caf801aadf18811a8e/coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9", size = 211692, upload-time = "2025-05-23T11:38:08.485Z" }, - { url = "https://files.pythonhosted.org/packages/96/fa/c3c1b476de96f2bc7a8ca01a9f1fcb51c01c6b60a9d2c3e66194b2bdb4af/coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879", size = 212115, upload-time = "2025-05-23T11:38:09.989Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c2/5414c5a1b286c0f3881ae5adb49be1854ac5b7e99011501f81c8c1453065/coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a", size = 244740, upload-time = "2025-05-23T11:38:11.947Z" }, - { url = "https://files.pythonhosted.org/packages/cd/46/1ae01912dfb06a642ef3dd9cf38ed4996fda8fe884dab8952da616f81a2b/coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5", size = 242429, upload-time = "2025-05-23T11:38:13.955Z" }, - { url = "https://files.pythonhosted.org/packages/06/58/38c676aec594bfe2a87c7683942e5a30224791d8df99bcc8439fde140377/coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11", size = 244218, upload-time = "2025-05-23T11:38:15.631Z" }, - { url = "https://files.pythonhosted.org/packages/80/0c/95b1023e881ce45006d9abc250f76c6cdab7134a1c182d9713878dfefcb2/coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a", size = 243865, upload-time = "2025-05-23T11:38:17.622Z" }, - { url = "https://files.pythonhosted.org/packages/57/37/0ae95989285a39e0839c959fe854a3ae46c06610439350d1ab860bf020ac/coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb", size = 242038, upload-time = "2025-05-23T11:38:19.966Z" }, - { url = "https://files.pythonhosted.org/packages/4d/82/40e55f7c0eb5e97cc62cbd9d0746fd24e8caf57be5a408b87529416e0c70/coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54", size = 242567, upload-time = "2025-05-23T11:38:21.912Z" }, - { url = "https://files.pythonhosted.org/packages/f9/35/66a51adc273433a253989f0d9cc7aa6bcdb4855382cf0858200afe578861/coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a", size = 214194, upload-time = "2025-05-23T11:38:23.571Z" }, - { url = "https://files.pythonhosted.org/packages/f6/8f/a543121f9f5f150eae092b08428cb4e6b6d2d134152c3357b77659d2a605/coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975", size = 215109, upload-time = "2025-05-23T11:38:25.137Z" }, - { url = "https://files.pythonhosted.org/packages/77/65/6cc84b68d4f35186463cd7ab1da1169e9abb59870c0f6a57ea6aba95f861/coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53", size = 213521, upload-time = "2025-05-23T11:38:27.123Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, - { url = "https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, - { url = "https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, - { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, - { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = "2025-05-23T11:38:39.238Z" }, - { url = "https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = "2025-05-23T11:38:40.845Z" }, - { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, - { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, - { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, - { url = "https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, - { url = "https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, - { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, - { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, - { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 242872, upload-time = "2025-05-23T11:39:01.049Z" }, - { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = "2025-05-23T11:39:02.709Z" }, - { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, - { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, - { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, - { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, - { url = "https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, - { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, - { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, - { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = "2025-05-23T11:39:24.644Z" }, - { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, - { url = "https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, - { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/2f/572b29496d8234e4a7773200dd835a0d32d9e171f2d974f3fe04a9dbc271/coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837", size = 203636, upload-time = "2025-05-23T11:39:52.002Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", 
size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = 
"2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, 
upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, ] [package.optional-dependencies] @@ -338,15 +534,20 @@ toml = [ ] [[package]] -name = "deprecated" -version = "1.2.18" +name = "cython" +version = "3.1.4" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744, upload-time = "2025-01-27T10:46:25.7Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/f6/d762df1f436a0618455d37f4e4c4872a7cd0dcfc8dec3022ee99e4389c69/cython-3.1.4.tar.gz", hash = "sha256:9aefefe831331e2d66ab31799814eae4d0f8a2d246cbaaaa14d1be29ef777683", size = 3190778, upload-time = "2025-09-16T07:20:33.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998, upload-time = "2025-01-27T10:46:09.186Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ab/0a568bac7c4c052db4ae27edf01e16f3093cdfef04a2dfd313ef1b3c478a/cython-3.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d1d7013dba5fb0506794d4ef8947ff5ed021370614950a8d8d04e57c8c84499e", size = 3026389, upload-time = "2025-09-16T07:22:02.212Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b7/51f5566e1309215a7fef744975b2fabb56d3fdc5fa1922fd7e306c14f523/cython-3.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:eed989f5c139d6550ef2665b783d86fab99372590c97f10a3c26c4523c5fce9e", size = 2955954, upload-time = "2025-09-16T07:22:03.782Z" }, + { url = "https://files.pythonhosted.org/packages/f0/51/2939c739cfdc67ab94935a2c4fcc75638afd15e1954552655503a4112e92/cython-3.1.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0d26af46505d0e54fe0f05e7ad089fd0eed8fa04f385f3ab88796f554467bcb9", size = 3062976, upload-time = "2025-09-16T07:22:20.517Z" }, + { url = "https://files.pythonhosted.org/packages/eb/bd/a84de57fd01017bf5dba84a49aeee826db21112282bf8d76ab97567ee15d/cython-3.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66ac8bb5068156c92359e3f0eefa138c177d59d1a2e8a89467881fa7d06aba3b", size = 2970701, upload-time = "2025-09-16T07:22:22.644Z" }, + { url = "https://files.pythonhosted.org/packages/24/10/1acc34f4d2d14de38e2d3ab4795ad1c8f547cebc2d9e7477a49a063ba607/cython-3.1.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ab549d0fc187804e0f14fc4759e4b5ad6485ffc01554b2f8b720cc44aeb929cd", size = 3051524, upload-time = "2025-09-16T07:22:40.607Z" }, + { url = "https://files.pythonhosted.org/packages/04/85/8457a78e9b9017a4fb0289464066ff2e73c5885f1edb9c1b9faaa2877fe2/cython-3.1.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52eae5d9bcc515441a436dcae2cbadfd00c5063d4d7809bd0178931690c06a76", size = 2958862, upload-time = "2025-09-16T07:22:42.646Z" }, + { url = "https://files.pythonhosted.org/packages/38/85/f1380e8370b470b218e452ba3995555524e3652f026333e6bad6c68770b5/cython-3.1.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c7258739d5560918741cb040bd85ba7cc2f09d868de9116a637e06714fec1f69", size = 3045864, upload-time = "2025-09-16T07:22:59.854Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/54c7bc78df1e55ac311054cb2fd33908f23b8a6f350c30defeca416d8077/cython-3.1.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b2d522ee8d3528035e247ee721fb40abe92e9ea852dc9e48802cec080d5de859", size = 2967105, upload-time = "2025-09-16T07:23:01.666Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/f7351052cf9db771fe4f32fca47fd66e6d9b53d8613b17faf7d130a9d553/cython-3.1.4-py3-none-any.whl", hash = "sha256:d194d95e4fa029a3f6c7d46bdd16d973808c7ea4797586911fdb67cb98b1a2c6", size = 1227541, upload-time = "2025-09-16T07:20:29.595Z" }, ] [[package]] @@ -359,121 +560,202 @@ wheels = [ ] [[package]] -name = "eval-type-backport" -version = "0.2.2" +name = "docstring-parser" +version = "0.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1", size = 9079, upload-time = "2024-12-21T20:09:46.005Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a", size = 5830, upload-time = "2024-12-21T20:09:44.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] [[package]] -name = "exceptiongroup" -version = "1.3.0" +name = "eval-type-backport" +version = "0.2.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/ea/8b0ac4469d4c347c6a385ff09dc3c048c2d021696664e26c7ee6791631b5/eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1", size = 9079, upload-time = "2024-12-21T20:09:46.005Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/ce/31/55cd413eaccd39125368be33c46de24a1f639f2e12349b0361b4678f3915/eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a", size = 5830, upload-time = "2024-12-21T20:09:44.175Z" }, ] [[package]] name = "executing" -version = "2.2.0" +version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693, upload-time = "2025-01-22T15:41:29.403Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702, upload-time = "2025-01-22T15:41:25.929Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] [[package]] -name = "fasta2a" -version = "0.2.9" +name = "fastapi" +version = "0.117.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api" }, { name = "pydantic" }, { name = "starlette" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8f/e9/2a55a9192ac3541fc67908beb192cfc18518aecd4da838edfd6147bd8b02/fasta2a-0.2.9.tar.gz", hash = "sha256:1fc15fd4a14e361de160c41e0e15922bf6f7474285d9706d5b659051cc66c9a1", 
size = 12284, upload-time = "2025-05-26T07:48:32.794Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/7e/d9788300deaf416178f61fb3c2ceb16b7d0dc9f82a08fdb87a5e64ee3cc7/fastapi-0.117.1.tar.gz", hash = "sha256:fb2d42082d22b185f904ca0ecad2e195b851030bd6c5e4c032d1c981240c631a", size = 307155, upload-time = "2025-09-20T20:16:56.663Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/df/dd967535662ecc9e101a7d6c0c643a055aabc3de47411c31c1dd624356c8/fasta2a-0.2.9-py3-none-any.whl", hash = "sha256:8b855b36f29fde6dcb79ad55be337a8165381b679bec829913009c55581e284e", size = 15328, upload-time = "2025-05-26T07:48:22.372Z" }, + { url = "https://files.pythonhosted.org/packages/6d/45/d9d3e8eeefbe93be1c50060a9d9a9f366dba66f288bb518a9566a23a8631/fastapi-0.117.1-py3-none-any.whl", hash = "sha256:33c51a0d21cab2b9722d4e56dbb9316f3687155be6b276191790d8da03507552", size = 95959, upload-time = "2025-09-20T20:16:53.661Z" }, ] [[package]] name = "fastavro" -version = "1.11.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/8f/32664a3245247b13702d13d2657ea534daf64e58a3f72a3a2d10598d6916/fastavro-1.11.1.tar.gz", hash = "sha256:bf6acde5ee633a29fb8dfd6dfea13b164722bc3adc05a0e055df080549c1c2f8", size = 1016250, upload-time = "2025-05-18T04:54:31.413Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/be/53df3fec7fdabc1848896a76afb0f01ab96b58abb29611aa68a994290167/fastavro-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:603aa1c1d1be21fb4bcb63e1efb0711a9ddb337de81391c32dac95c6e0dacfcc", size = 944225, upload-time = "2025-05-18T04:54:34.586Z" }, - { url = "https://files.pythonhosted.org/packages/d0/cc/c7c76a082fbf5aaaf82ab7da7b9ede6fc99eb8f008c084c67d230b29c446/fastavro-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45653b312d4ce297e2bd802ea3ffd17ecbe718e5e8b6e2ae04cd72cb50bb99d5", size = 3105189, upload-time = "2025-05-18T04:54:36.855Z" }, - { url = "https://files.pythonhosted.org/packages/48/ff/5f1f0b5e3835e788ba8121d6dd6426cd4c6e58ce1bff02cb7810278648b0/fastavro-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998a53fc552e6bee9acda32af258f02557313c85fb5b48becba5b71ec82f421e", size = 3113124, upload-time = "2025-05-18T04:54:40.013Z" }, - { url = "https://files.pythonhosted.org/packages/e5/b8/1ac01433b55460dabeb6d3fbb05ba1c971d57137041e8f53b2e9f46cd033/fastavro-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9f878c9ad819467120cb066f1c73496c42eb24ecdd7c992ec996f465ef4cedad", size = 3155196, upload-time = "2025-05-18T04:54:42.307Z" }, - { url = "https://files.pythonhosted.org/packages/5e/a8/66e599b946ead031a5caba12772e614a7802d95476e8732e2e9481369973/fastavro-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da9e4c231ac4951092c2230ca423d8a3f2966718f072ac1e2c5d2d44c70b2a50", size = 3229028, upload-time = "2025-05-18T04:54:44.503Z" }, - { url = "https://files.pythonhosted.org/packages/0e/e7/17c35e2dfe8a9e4f3735eabdeec366b0edc4041bb1a84fcd528c8efd12af/fastavro-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:7423bfad3199567eeee7ad6816402c7c0ee1658b959e8c10540cfbc60ce96c2a", size = 449177, upload-time = "2025-05-18T04:54:46.127Z" }, - { url = "https://files.pythonhosted.org/packages/8e/63/f33d6fd50d8711f305f07ad8c7b4a25f2092288f376f484c979dcf277b07/fastavro-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3573340e4564e8962e22f814ac937ffe0d4be5eabbd2250f77738dc47e3c8fe9", size = 957526, 
upload-time = "2025-05-18T04:54:47.701Z" }, - { url = "https://files.pythonhosted.org/packages/f4/09/a57ad9d8cb9b8affb2e43c29d8fb8cbdc0f1156f8496067a0712c944bacc/fastavro-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7291cf47735b8bd6ff5d9b33120e6e0974f52fd5dff90cd24151b22018e7fd29", size = 3322808, upload-time = "2025-05-18T04:54:50.419Z" }, - { url = "https://files.pythonhosted.org/packages/86/70/d6df59309d3754d6d4b0c7beca45b9b1a957d6725aed8da3aca247db3475/fastavro-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf3bb065d657d5bac8b2cb39945194aa086a9b3354f2da7f89c30e4dc20e08e2", size = 3330870, upload-time = "2025-05-18T04:54:52.406Z" }, - { url = "https://files.pythonhosted.org/packages/ad/ea/122315154d2a799a2787058435ef0d4d289c0e8e575245419436e9b702ca/fastavro-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8758317c85296b848698132efb13bc44a4fbd6017431cc0f26eaeb0d6fa13d35", size = 3343369, upload-time = "2025-05-18T04:54:54.652Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/7800de5fec36d55a818adf3db3b085b1a033c4edd60323cf6ca0754cf8cb/fastavro-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ad99d57228f83bf3e2214d183fbf6e2fda97fd649b2bdaf8e9110c36cbb02624", size = 3430629, upload-time = "2025-05-18T04:54:56.513Z" }, - { url = "https://files.pythonhosted.org/packages/48/65/2b74ccfeba9dcc3f7dbe64907307386b4a0af3f71d2846f63254df0f1e1d/fastavro-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:9134090178bdbf9eefd467717ced3dc151e27a7e7bfc728260ce512697efe5a4", size = 451621, upload-time = "2025-05-18T04:54:58.156Z" }, - { url = "https://files.pythonhosted.org/packages/99/58/8e789b0a2f532b22e2d090c20d27c88f26a5faadcba4c445c6958ae566cf/fastavro-1.11.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e8bc238f2637cd5d15238adbe8fb8c58d2e6f1870e0fb28d89508584670bae4b", size = 939583, upload-time = "2025-05-18T04:54:59.853Z" }, - { url = "https://files.pythonhosted.org/packages/34/3f/02ed44742b1224fe23c9fc9b9b037fc61769df716c083cf80b59a02b9785/fastavro-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b403933081c83fc4d8a012ee64b86e560a024b1280e3711ee74f2abc904886e8", size = 3257734, upload-time = "2025-05-18T04:55:02.366Z" }, - { url = "https://files.pythonhosted.org/packages/cc/bc/9cc8b19eeee9039dd49719f8b4020771e805def262435f823fa8f27ddeea/fastavro-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f6ecb4b5f77aa756d973b7dd1c2fb4e4c95b4832a3c98b059aa96c61870c709", size = 3318218, upload-time = "2025-05-18T04:55:04.352Z" }, - { url = "https://files.pythonhosted.org/packages/39/77/3b73a986606494596b6d3032eadf813a05b59d1623f54384a23de4217d5f/fastavro-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:059893df63ef823b0231b485c9d43016c7e32850cae7bf69f4e9d46dd41c28f2", size = 3297296, upload-time = "2025-05-18T04:55:06.175Z" }, - { url = "https://files.pythonhosted.org/packages/8e/1c/b69ceef6494bd0df14752b5d8648b159ad52566127bfd575e9f5ecc0c092/fastavro-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5120ffc9a200699218e01777e695a2f08afb3547ba818184198c757dc39417bd", size = 3438056, upload-time = "2025-05-18T04:55:08.276Z" }, - { url = "https://files.pythonhosted.org/packages/ef/11/5c2d0db3bd0e6407546fabae9e267bb0824eacfeba79e7dd81ad88afa27d/fastavro-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:7bb9d0d2233f33a52908b6ea9b376fe0baf1144bdfdfb3c6ad326e200a8b56b0", size = 442824, upload-time = 
"2025-05-18T04:55:10.385Z" }, - { url = "https://files.pythonhosted.org/packages/ec/08/8e25b9e87a98f8c96b25e64565fa1a1208c0095bb6a84a5c8a4b925688a5/fastavro-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f963b8ddaf179660e814ab420850c1b4ea33e2ad2de8011549d958b21f77f20a", size = 931520, upload-time = "2025-05-18T04:55:11.614Z" }, - { url = "https://files.pythonhosted.org/packages/02/ee/7cf5561ef94781ed6942cee6b394a5e698080f4247f00f158ee396ec244d/fastavro-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0253e5b6a3c9b62fae9fc3abd8184c5b64a833322b6af7d666d3db266ad879b5", size = 3195989, upload-time = "2025-05-18T04:55:13.732Z" }, - { url = "https://files.pythonhosted.org/packages/b3/31/f02f097d79f090e5c5aca8a743010c4e833a257c0efdeb289c68294f7928/fastavro-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca637b150e1f4c0e8e564fad40a16bd922bcb7ffd1a6e4836e6084f2c4f4e8db", size = 3239755, upload-time = "2025-05-18T04:55:16.463Z" }, - { url = "https://files.pythonhosted.org/packages/09/4c/46626b4ee4eb8eb5aa7835973c6ba8890cf082ef2daface6071e788d2992/fastavro-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76af1709031621828ca6ce7f027f7711fa33ac23e8269e7a5733996ff8d318da", size = 3243788, upload-time = "2025-05-18T04:55:18.544Z" }, - { url = "https://files.pythonhosted.org/packages/a7/6f/8ed42524e9e8dc0554f0f211dd1c6c7a9dde83b95388ddcf7c137e70796f/fastavro-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8224e6d8d9864d4e55dafbe88920d6a1b8c19cc3006acfac6aa4f494a6af3450", size = 3378330, upload-time = "2025-05-18T04:55:20.887Z" }, - { url = "https://files.pythonhosted.org/packages/b8/51/38cbe243d5facccab40fc43a4c17db264c261be955ce003803d25f0da2c3/fastavro-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:cde7ed91b52ff21f0f9f157329760ba7251508ca3e9618af3ffdac986d9faaa2", size = 443115, upload-time = "2025-05-18T04:55:22.107Z" }, - { url = "https://files.pythonhosted.org/packages/d0/57/0d31ed1a49c65ad9f0f0128d9a928972878017781f9d4336f5f60982334c/fastavro-1.11.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e5ed1325c1c414dd954e7a2c5074daefe1eceb672b8c727aa030ba327aa00693", size = 1021401, upload-time = "2025-05-18T04:55:23.431Z" }, - { url = "https://files.pythonhosted.org/packages/56/7a/a3f1a75fbfc16b3eff65dc0efcdb92364967923194312b3f8c8fc2cb95be/fastavro-1.11.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cd3c95baeec37188899824faf44a5ee94dfc4d8667b05b2f867070c7eb174c4", size = 3384349, upload-time = "2025-05-18T04:55:25.575Z" }, - { url = "https://files.pythonhosted.org/packages/be/84/02bceb7518867df84027232a75225db758b9b45f12017c9743f45b73101e/fastavro-1.11.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e0babcd81acceb4c60110af9efa25d890dbb68f7de880f806dadeb1e70fe413", size = 3240658, upload-time = "2025-05-18T04:55:27.633Z" }, - { url = "https://files.pythonhosted.org/packages/f2/17/508c846c644d39bc432b027112068b8e96e7560468304d4c0757539dd73a/fastavro-1.11.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c0cb8063c7208b53b6867983dc6ae7cc80b91116b51d435d2610a5db2fc52f", size = 3372809, upload-time = "2025-05-18T04:55:30.063Z" }, - { url = "https://files.pythonhosted.org/packages/fe/84/9c2917a70ed570ddbfd1d32ac23200c1d011e36c332e59950d2f6d204941/fastavro-1.11.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1bc2824e9969c04ab6263d269a1e0e5d40b9bd16ade6b70c29d6ffbc4f3cc102", size = 3387171, upload-time 
= "2025-05-18T04:55:32.531Z" }, +version = "1.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/ec/762dcf213e5b97ea1733b27d5a2798599a1fa51565b70a93690246029f84/fastavro-1.12.0.tar.gz", hash = "sha256:a67a87be149825d74006b57e52be068dfa24f3bfc6382543ec92cd72327fe152", size = 1025604, upload-time = "2025-07-31T15:16:42.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/51/6bd93f2c9f3bb98f84ee0ddb436eb46a308ec53e884d606b70ca9d6b132d/fastavro-1.12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56f78d1d527bea4833945c3a8c716969ebd133c5762e2e34f64c795bd5a10b3e", size = 962215, upload-time = "2025-07-31T15:16:58.173Z" }, + { url = "https://files.pythonhosted.org/packages/32/37/3e2e429cefe03d1fa98cc4c4edae1d133dc895db64dabe84c17b4dc0921c/fastavro-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7ce0d117642bb4265ef6e1619ec2d93e942a98f60636e3c0fbf1eb438c49026", size = 3412716, upload-time = "2025-07-31T15:17:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/33/28/eb37d9738ea3649bdcab1b6d4fd0facf9c36261623ea368554734d5d6821/fastavro-1.12.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52e9d9648aad4cca5751bcbe2d3f98e85afb0ec6c6565707f4e2f647ba83ba85", size = 3439283, upload-time = "2025-07-31T15:17:02.505Z" }, + { url = "https://files.pythonhosted.org/packages/57/6f/7aba4efbf73fd80ca20aa1db560936c222dd1b4e5cadbf9304361b9065e3/fastavro-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6183875381ec1cf85a1891bf46696fd1ec2ad732980e7bccc1e52e9904e7664d", size = 3354728, upload-time = "2025-07-31T15:17:04.705Z" }, + { url = "https://files.pythonhosted.org/packages/bf/2d/b0d8539f4622ebf5355b7898ac7930b1ff638de85b6c3acdd0718e05d09e/fastavro-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5ad00a2b94d3c8bf9239acf92d56e3e457e1d188687a8d80f31e858ccf91a6d6", size = 3442598, upload-time = "2025-07-31T15:17:06.986Z" }, + { url = "https://files.pythonhosted.org/packages/fe/33/882154b17e0fd468f1a5ae8cc903805531e1fcb699140315366c5f8ec20d/fastavro-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:6c4d1c276ff1410f3830648bb43312894ad65709ca0cb54361e28954387a46ac", size = 451836, upload-time = "2025-07-31T15:17:08.219Z" }, + { url = "https://files.pythonhosted.org/packages/4a/f0/df076a541144d2f351820f3d9e20afa0e4250e6e63cb5a26f94688ed508c/fastavro-1.12.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e849c70198e5bdf6f08df54a68db36ff72bd73e8f14b1fd664323df073c496d8", size = 944288, upload-time = "2025-07-31T15:17:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/52/1d/5c1ea0f6e98a441953de822c7455c9ce8c3afdc7b359dd23c5a5e5039249/fastavro-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b260e1cdc9a77853a2586b32208302c08dddfb5c20720b5179ac5330e06ce698", size = 3404895, upload-time = "2025-07-31T15:17:11.939Z" }, + { url = "https://files.pythonhosted.org/packages/36/8b/115a3ffe67fb48de0de704284fa5e793afa70932b8b2e915cc7545752f05/fastavro-1.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:181779688d8b80957953031f0d82ec0761be667a78e03dac642511ff996c771a", size = 3469935, upload-time = "2025-07-31T15:17:14.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/f8/bf3b7370687ab21205e07b37acdd2455ca69f5d25c72d2b315faf357b1cd/fastavro-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6881caf914b36a57d1f90810f04a89bd9c837dd4a48e1b66a8b92136e85c415d", size = 3306148, upload-time = "2025-07-31T15:17:16.121Z" }, + { url = "https://files.pythonhosted.org/packages/97/55/fba2726b59a984c7aa2fc19c6e8ef1865eca6a3f66e78810d602ca22af59/fastavro-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8bf638248499eb78c422f12fedc08f9b90b5646c3368415e388691db60e7defb", size = 3442851, upload-time = "2025-07-31T15:17:18.738Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3e/25059b8fe0b8084fd858dca77caf0815d73e0ca4731485f34402e8d40c43/fastavro-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ed4f18b7c2f651a5ee2233676f62aac332995086768301aa2c1741859d70b53e", size = 445449, upload-time = "2025-07-31T15:17:20.438Z" }, + { url = "https://files.pythonhosted.org/packages/db/c7/f18b73b39860d54eb724f881b8932882ba10c1d4905e491cd25d159a7e49/fastavro-1.12.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dbe2b690d9caba7d888126cc1dd980a8fcf5ee73de41a104e3f15bb5e08c19c8", size = 936220, upload-time = "2025-07-31T15:17:21.994Z" }, + { url = "https://files.pythonhosted.org/packages/20/22/61ec800fda2a0f051a21b067e4005fd272070132d0a0566c5094e09b666c/fastavro-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:07ff9e6c6e8739203ccced3205646fdac6141c2efc83f4dffabf5f7d0176646d", size = 3348450, upload-time = "2025-07-31T15:17:24.186Z" }, + { url = "https://files.pythonhosted.org/packages/ca/79/1f34618fb643b99e08853e8a204441ec11a24d3e1fce050e804e6ff5c5ae/fastavro-1.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a172655add31882cab4e1a96b7d49f419906b465b4c2165081db7b1db79852f", size = 3417238, upload-time = "2025-07-31T15:17:26.531Z" }, + { url = "https://files.pythonhosted.org/packages/ea/0b/79611769eb15cc17992dc3699141feb0f75afd37b0cb964b4a08be45214e/fastavro-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:be20ce0331b70b35dca1a4c7808afeedf348dc517bd41602ed8fc9a1ac2247a9", size = 3252425, upload-time = "2025-07-31T15:17:28.989Z" }, + { url = "https://files.pythonhosted.org/packages/86/1a/65e0999bcc4bbb38df32706b6ae6ce626d528228667a5e0af059a8b25bb2/fastavro-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a52906681384a18b99b47e5f9eab64b4744d6e6bc91056b7e28641c7b3c59d2b", size = 3385322, upload-time = "2025-07-31T15:17:31.232Z" }, + { url = "https://files.pythonhosted.org/packages/e9/49/c06ebc9e5144f7463c2bfcb900ca01f87db934caf131bccbffc5d0aaf7ec/fastavro-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf153531191bcfc445c21e05dd97232a634463aa717cf99fb2214a51b9886bff", size = 445586, upload-time = "2025-07-31T15:17:32.634Z" }, + { url = "https://files.pythonhosted.org/packages/dd/c8/46ab37076dc0f86bb255791baf9b3c3a20f77603a86a40687edacff8c03d/fastavro-1.12.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1928e88a760688e490118e1bedf0643b1f3727e5ba59c07ac64638dab81ae2a1", size = 1025933, upload-time = "2025-07-31T15:17:34.321Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7f/cb3e069dcc903034a6fe82182d92c75d981d86aee94bd028200a083696b3/fastavro-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd51b706a3ab3fe4af84a0b37f60d1bcd79295df18932494fc9f49db4ba2bab2", size = 3560435, upload-time = 
"2025-07-31T15:17:36.314Z" }, + { url = "https://files.pythonhosted.org/packages/d0/12/9478c28a2ac4fcc10ad9488dd3dcd5fac1ef550c3022c57840330e7cec4b/fastavro-1.12.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1148263931f6965e1942cf670f146148ca95b021ae7b7e1f98bf179f1c26cc58", size = 3453000, upload-time = "2025-07-31T15:17:38.875Z" }, + { url = "https://files.pythonhosted.org/packages/00/32/a5c8b3af9561c308c8c27da0be998b6237a47dbbdd8d5499f02731bd4073/fastavro-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4099e0f6fb8a55f59891c0aed6bfa90c4d20a774737e5282c74181b4703ea0cb", size = 3383233, upload-time = "2025-07-31T15:17:40.833Z" }, + { url = "https://files.pythonhosted.org/packages/42/a0/f6290f3f8059543faf3ef30efbbe9bf3e4389df881891136cd5fb1066b64/fastavro-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:10c586e9e3bab34307f8e3227a2988b6e8ac49bff8f7b56635cf4928a153f464", size = 3402032, upload-time = "2025-07-31T15:17:42.958Z" }, ] [[package]] name = "filelock" -version = "3.18.0" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = 
"sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + +[[package]] +name = "fsspec" +version = "2025.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19", size = 304847, upload-time = "2025-09-02T19:10:49.215Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7", size = 199289, upload-time = "2025-09-02T19:10:47.708Z" }, ] [[package]] -name = "fsspec" -version = "2025.5.1" +name = "genai-prices" +version = "0.0.27" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033, upload-time = "2025-05-24T12:03:23.792Z" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/f1/e9da3299662343f4757e7113bda469f9a3fcdec03a57e6f926ecae790620/genai_prices-0.0.27.tar.gz", hash = "sha256:e0ac07c9af75c6cd28c3feab5ed4dd7299e459975927145f1aa25317db3fb24d", size = 45451, upload-time = "2025-09-10T19:02:20.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052, upload-time = "2025-05-24T12:03:21.66Z" }, + { url = "https://files.pythonhosted.org/packages/43/75/f2e11c7a357289934a26e45d60eb9892523e5e9b07ad886be7a8a35078b1/genai_prices-0.0.27-py3-none-any.whl", hash = "sha256:3f95bf72378ddfc88992755e33f1b208f15242697807d71ade5c1627caa56ce1", size = 48053, upload-time = "2025-09-10T19:02:19.416Z" }, ] [[package]] name = "google-auth" -version = "2.40.2" +version = "2.40.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, { name = "pyasn1-modules" }, { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/84/f67f53c505a6b2c5da05c988e2a5483f5ba9eee4b1841d2e3ff22f547cd5/google_auth-2.40.2.tar.gz", hash = "sha256:a33cde547a2134273226fa4b853883559947ebe9207521f7afc707efbf690f58", size = 280990, upload-time = "2025-05-21T18:04:59.816Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/9b/e92ef23b84fa10a64ce4831390b7a4c2e53c0132568d99d4ae61d04c8855/google_auth-2.40.3.tar.gz", hash = 
"sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77", size = 281029, upload-time = "2025-06-04T18:04:57.577Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c7/e2d82e6702e2a9e2311c138f8e1100f21d08aed0231290872b229ae57a86/google_auth-2.40.2-py2.py3-none-any.whl", hash = "sha256:f7e568d42eedfded58734f6a60c58321896a621f7c116c411550a4b4a13da90b", size = 216102, upload-time = "2025-05-21T18:04:57.547Z" }, + { url = "https://files.pythonhosted.org/packages/17/63/b19553b658a1692443c62bd07e5868adaa0ad746a0751ba62c59568cd45b/google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca", size = 216137, upload-time = "2025-06-04T18:04:55.573Z" }, ] [[package]] name = "google-genai" -version = "1.16.1" +version = "1.38.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -481,12 +763,13 @@ dependencies = [ { name = "httpx" }, { name = "pydantic" }, { name = "requests" }, + { name = "tenacity" }, { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/1f/1a52736e87b4a22afef615de45e2f509fbfb55c09798620b0c3f394076ef/google_genai-1.16.1.tar.gz", hash = "sha256:4b4ed4ed781a9d61e5ce0fef1486dd3a5d7ff0a73bd76b9633d21e687ab998df", size = 194270, upload-time = "2025-05-20T01:05:26.717Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b4/11/108ddd3aca8af6a9e2369e59b9646a3a4c64aefb39d154f6467ab8d79f34/google_genai-1.38.0.tar.gz", hash = "sha256:363272fc4f677d0be6a1aed7ebabe8adf45e1626a7011a7886a587e9464ca9ec", size = 244903, upload-time = "2025-09-16T23:25:42.577Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/31/30caa8d4ae987e47c5250fb6680588733863fd5b39cacb03ba1977c29bde/google_genai-1.16.1-py3-none-any.whl", hash = "sha256:6ae5d24282244f577ca4f0d95c09f75ab29e556602c9d3531b70161e34cd2a39", size = 196327, upload-time = "2025-05-20T01:05:24.831Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/1de711bab3c118284904c3bedf870519e8c63a7a8e0905ac3833f1db9cbc/google_genai-1.38.0-py3-none-any.whl", hash = "sha256:95407425132d42b3fa11bc92b3f5cf61a0fbd8d9add1f0e89aac52c46fbba090", size = 245558, upload-time = "2025-09-16T23:25:41.141Z" }, ] [[package]] @@ -501,21 +784,63 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, ] +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, 
upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, + { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, + { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, + { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, + { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, + { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", 
hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, +] + [[package]] name = "griffe" -version = "1.7.3" +version = "1.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a9/3e/5aa9a61f7c3c47b0b52a1d930302992229d191bf4bc76447b324b731510a/griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b", size = 395137, upload-time = "2025-04-23T11:29:09.147Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/d7/6c09dd7ce4c7837e4cdb11dce980cb45ae3cd87677298dc3b781b6bce7d3/griffe-1.14.0.tar.gz", hash = "sha256:9d2a15c1eca966d68e00517de5d69dd1bc5c9f2335ef6c1775362ba5b8651a13", size = 424684, upload-time = "2025-09-05T15:02:29.167Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/c6/5c20af38c2a57c15d87f7f38bee77d63c1d2a3689f74fefaf35915dd12b2/griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75", size = 129303, upload-time = "2025-04-23T11:29:07.145Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b1/9ff6578d789a89812ff21e4e0f80ffae20a65d5dd84e7a17873fe3b365be/griffe-1.14.0-py3-none-any.whl", hash = "sha256:0e9d52832cccf0f7188cfe585ba962d2674b241c01916d780925df34873bceb0", size = 144439, upload-time = "2025-09-05T15:02:27.511Z" }, ] [[package]] name = "groq" -version = "0.25.0" +version = "0.31.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -525,9 +850,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/fc/29e9c24ab59602747027f41b9d761d24cf9e5771014c9a731137f51e9cce/groq-0.25.0.tar.gz", hash = "sha256:6e1c7466b0da0130498187b825bd239f86fb77bf7551eacfbfa561d75048746a", size = 128199, upload-time = "2025-05-16T19:57:43.381Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/e9/f5d523ae8c78aa375addf44d1f64206271d43e6b42d4e5ce3dc76563a75b/groq-0.31.1.tar.gz", hash = "sha256:4d611e0100cb22732c43b53af37933a1b8a5c5a18fa96132fee14e6c15d737e6", size = 141400, upload-time = "2025-09-04T18:01:06.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/11/1019a6cfdb2e520cb461cf70d859216be8ca122ddf5ad301fc3b0ee45fd4/groq-0.25.0-py3-none-any.whl", hash = "sha256:aadc78b40b1809cdb196b1aa8c7f7293108767df1508cafa3e0d5045d9328e7a", size = 129371, upload-time = "2025-05-16T19:57:41.786Z" }, + { url = "https://files.pythonhosted.org/packages/d6/7d/877dbef7d72efacc657777b2e7897baa7cc7fcd0905f1b4a6423269e12a1/groq-0.31.1-py3-none-any.whl", hash = "sha256:536bd5dd6267dea5b3710e41094c0479748da2d155b9e073650e94b7fb2d71e8", size = 134903, upload-time = "2025-09-04T18:01:04.029Z" }, ] [[package]] @@ -541,17 +866,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.2" +version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/be/58f20728a5b445f8b064e74f0618897b3439f5ef90934da1916b9dfac76f/hf_xet-1.1.2.tar.gz", hash = "sha256:3712d6d4819d3976a1c18e36db9f503e296283f9363af818f50703506ed63da3", size = 467009, upload-time = "2025-05-16T20:44:34.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = 
"sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/ae/f1a63f75d9886f18a80220ba31a1c7b9c4752f03aae452f358f538c6a991/hf_xet-1.1.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:dfd1873fd648488c70735cb60f7728512bca0e459e61fcd107069143cd798469", size = 2642559, upload-time = "2025-05-16T20:44:30.217Z" }, - { url = "https://files.pythonhosted.org/packages/50/ab/d2c83ae18f1015d926defd5bfbe94c62d15e93f900e6a192e318ee947105/hf_xet-1.1.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:29b584983b2d977c44157d9241dcf0fd50acde0b7bff8897fe4386912330090d", size = 2541360, upload-time = "2025-05-16T20:44:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a7/693dc9f34f979e30a378125e2150a0b2d8d166e6d83ce3950eeb81e560aa/hf_xet-1.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b29ac84298147fe9164cc55ad994ba47399f90b5d045b0b803b99cf5f06d8ec", size = 5183081, upload-time = "2025-05-16T20:44:27.505Z" }, - { url = "https://files.pythonhosted.org/packages/3d/23/c48607883f692a36c0a7735f47f98bad32dbe459a32d1568c0f21576985d/hf_xet-1.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d921ba32615676e436a0d15e162331abc9ed43d440916b1d836dc27ce1546173", size = 5356100, upload-time = "2025-05-16T20:44:25.681Z" }, - { url = "https://files.pythonhosted.org/packages/eb/5b/b2316c7f1076da0582b52ea228f68bea95e243c388440d1dc80297c9d813/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d9b03c34e13c44893ab6e8fea18ee8d2a6878c15328dd3aabedbdd83ee9f2ed3", size = 5647688, upload-time = "2025-05-16T20:44:31.867Z" }, - { url = "https://files.pythonhosted.org/packages/2c/98/e6995f0fa579929da7795c961f403f4ee84af36c625963f52741d56f242c/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01b18608955b3d826307d37da8bd38b28a46cd2d9908b3a3655d1363274f941a", size = 5322627, upload-time = "2025-05-16T20:44:33.677Z" }, - { url = "https://files.pythonhosted.org/packages/59/40/8f1d5a44a64d8bf9e3c19576e789f716af54875b46daae65426714e75db1/hf_xet-1.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:3562902c81299b09f3582ddfb324400c6a901a2f3bc854f83556495755f4954c", size = 2739542, upload-time = "2025-05-16T20:44:36.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time 
= "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -584,15 +909,15 @@ wheels = [ [[package]] name = "httpx-limiter" -version = "0.3.0" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "aiolimiter" }, { name = "httpx" }, + { name = "pyrate-limiter" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6f/72/b8ef470dca30babce55fd9e59756b682999c757417adaf0ee99d846e5705/httpx_limiter-0.3.0.tar.gz", hash = "sha256:4d0c422edc40d41f882e94718466cbe91d3877097afe67bd3f55a9c0df3ea321", size = 11852, upload-time = "2025-05-10T21:19:11.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/8d/77c18a5d147e0e8ddc6fe124d9e48ea43e52ba9f7c91a5ab49e4909550f5/httpx_limiter-0.4.0.tar.gz", hash = "sha256:b1c6a39f4bad7654fdd934da1e0119cd91e9bd2ad61b9adad623cd7081c1a3b7", size = 13603, upload-time = "2025-08-22T10:11:23.731Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/f6/a71ea5bef3aa9bb34ef6e3b017b40616ceccb60621b234112be39d6fbc79/httpx_limiter-0.3.0-py3-none-any.whl", hash = "sha256:69f6e350456d2fe6eea5a36508098a925df16ef15e3d96d4abddd73fa0017625", size = 12667, upload-time = "2025-05-10T21:19:10.006Z" }, + { url = "https://files.pythonhosted.org/packages/23/94/b2d08aaadd219313d4ec8c843a53643779815c2ef06e8982f79acc57f1d2/httpx_limiter-0.4.0-py3-none-any.whl", hash = "sha256:33d914c442bce14fc1d8f28e0a954c87d9f5f5a82b51a6778f1f1a3506d9e6ac", size = 15954, upload-time = "2025-08-22T10:11:22.348Z" }, ] [[package]] @@ -606,7 +931,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "0.32.1" +version = "0.35.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -618,9 +943,14 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/4d/7a1f24199a4a6f1c8e47c3b5e0a7faf44e249fec5afb7e7f6000bb87e513/huggingface_hub-0.32.1.tar.gz", hash = "sha256:770acdae5ad973447074e10a98044306e567ff36012419ae80c051f446156551", size = 422371, upload-time = "2025-05-26T09:51:21.427Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/42/0e7be334a6851cd7d51cc11717cb95e89333ebf0064431c0255c56957526/huggingface_hub-0.35.1.tar.gz", hash = "sha256:3585b88c5169c64b7e4214d0e88163d4a709de6d1a502e0cd0459e9ee2c9c572", size = 461374, upload-time = "2025-09-23T13:43:47.074Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/cd/4fbfa8e937b89272a75805dc895cf3c7f648e1ba6ee431f8f6bf27bc1255/huggingface_hub-0.32.1-py3-none-any.whl", hash = 
"sha256:b7e644f8ba6c6ad975c436960eacc026c83ba2c2bc5ae8b4e3f7ce2b292e6b11", size = 509412, upload-time = "2025-05-26T09:51:19.269Z" }, + { url = "https://files.pythonhosted.org/packages/f1/60/4acf0c8a3925d9ff491dc08fe84d37e09cfca9c3b885e0db3d4dedb98cea/huggingface_hub-0.35.1-py3-none-any.whl", hash = "sha256:2f0e2709c711e3040e31d3e0418341f7092910f1462dd00350c4e97af47280a8", size = 563340, upload-time = "2025-09-23T13:43:45.343Z" }, +] + +[package.optional-dependencies] +inference = [ + { name = "aiohttp" }, ] [[package]] @@ -634,14 +964,14 @@ wheels = [ [[package]] name = "importlib-metadata" -version = "8.6.1" +version = "8.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/08/c1395a292bb23fd03bdf572a1357c5a733d3eecbab877641ceacab23db6e/importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580", size = 55767, upload-time = "2025-01-20T22:21:30.429Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971, upload-time = "2025-01-20T22:21:29.177Z" }, + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, ] [[package]] @@ -653,76 +983,86 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "invoke" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/42/127e6d792884ab860defc3f4d80a8f9812e48ace584ffc5a346de58cdc6c/invoke-2.2.0.tar.gz", hash = "sha256:ee6cbb101af1a859c7fe84f2a264c059020b0cb7fe3535f9424300ab568f6bd5", size = 299835, upload-time = "2023-07-12T18:05:17.998Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl", hash = "sha256:6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820", size = 160274, upload-time = "2023-07-12T18:05:16.294Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + [[package]] name = "jiter" -version = "0.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/7e/4011b5c77bec97cb2b572f566220364e3e21b51c48c5bd9c4a9c26b41b67/jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303", size = 317215, upload-time = "2025-05-18T19:03:04.303Z" }, - { url = "https://files.pythonhosted.org/packages/8a/4f/144c1b57c39692efc7ea7d8e247acf28e47d0912800b34d0ad815f6b2824/jiter-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32bb468e3af278f095d3fa5b90314728a6916d89ba3d0ffb726dd9bf7367285e", size = 322814, upload-time = "2025-05-18T19:03:06.433Z" }, - { url = "https://files.pythonhosted.org/packages/63/1f/db977336d332a9406c0b1f0b82be6f71f72526a806cbb2281baf201d38e3/jiter-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8b3e0068c26ddedc7abc6fac37da2d0af16b921e288a5a613f4b86f050354f", size = 345237, upload-time = "2025-05-18T19:03:07.833Z" }, - { url = "https://files.pythonhosted.org/packages/d7/1c/aa30a4a775e8a672ad7f21532bdbfb269f0706b39c6ff14e1f86bdd9e5ff/jiter-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:286299b74cc49e25cd42eea19b72aa82c515d2f2ee12d11392c56d8701f52224", size = 370999, upload-time = "2025-05-18T19:03:09.338Z" }, - { url = "https://files.pythonhosted.org/packages/35/df/f8257abc4207830cb18880781b5f5b716bad5b2a22fb4330cfd357407c5b/jiter-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ed5649ceeaeffc28d87fb012d25a4cd356dcd53eff5acff1f0466b831dda2a7", size = 491109, upload-time = "2025-05-18T19:03:11.13Z" }, - { url = "https://files.pythonhosted.org/packages/06/76/9e1516fd7b4278aa13a2cc7f159e56befbea9aa65c71586305e7afa8b0b3/jiter-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ab0051160cb758a70716448908ef14ad476c3774bd03ddce075f3c1f90a3d6", size = 388608, upload-time = "2025-05-18T19:03:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/67750672b4354ca20ca18d3d1ccf2c62a072e8a2d452ac3cf8ced73571ef/jiter-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03997d2f37f6b67d2f5c475da4412be584e1cec273c1cfc03d642c46db43f8cf", size = 352454, upload-time = "2025-05-18T19:03:14.741Z" }, - { url = "https://files.pythonhosted.org/packages/96/4d/5c4e36d48f169a54b53a305114be3efa2bbffd33b648cd1478a688f639c1/jiter-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c404a99352d839fed80d6afd6c1d66071f3bacaaa5c4268983fc10f769112e90", size = 391833, upload-time = "2025-05-18T19:03:16.426Z" }, - { url = "https://files.pythonhosted.org/packages/0b/de/ce4a6166a78810bd83763d2fa13f85f73cbd3743a325469a4a9289af6dae/jiter-0.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66e989410b6666d3ddb27a74c7e50d0829704ede652fd4c858e91f8d64b403d0", size = 
523646, upload-time = "2025-05-18T19:03:17.704Z" }, - { url = "https://files.pythonhosted.org/packages/a2/a6/3bc9acce53466972964cf4ad85efecb94f9244539ab6da1107f7aed82934/jiter-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b532d3af9ef4f6374609a3bcb5e05a1951d3bf6190dc6b176fdb277c9bbf15ee", size = 514735, upload-time = "2025-05-18T19:03:19.44Z" }, - { url = "https://files.pythonhosted.org/packages/b4/d8/243c2ab8426a2a4dea85ba2a2ba43df379ccece2145320dfd4799b9633c5/jiter-0.10.0-cp310-cp310-win32.whl", hash = "sha256:da9be20b333970e28b72edc4dff63d4fec3398e05770fb3205f7fb460eb48dd4", size = 210747, upload-time = "2025-05-18T19:03:21.184Z" }, - { url = "https://files.pythonhosted.org/packages/37/7a/8021bd615ef7788b98fc76ff533eaac846322c170e93cbffa01979197a45/jiter-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:f59e533afed0c5b0ac3eba20d2548c4a550336d8282ee69eb07b37ea526ee4e5", size = 207484, upload-time = "2025-05-18T19:03:23.046Z" }, - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, - { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = 
"2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, - { url = "https://files.pythonhosted.org/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617, upload-time = "2025-05-18T19:04:02.078Z" }, - { url = "https://files.pythonhosted.org/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947, upload-time = "2025-05-18T19:04:03.347Z" }, - { url = "https://files.pythonhosted.org/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618, upload-time = "2025-05-18T19:04:04.709Z" }, - { url = "https://files.pythonhosted.org/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829, upload-time = "2025-05-18T19:04:06.912Z" }, - { url = "https://files.pythonhosted.org/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034, upload-time = "2025-05-18T19:04:08.222Z" }, - { url = "https://files.pythonhosted.org/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529, upload-time = "2025-05-18T19:04:09.566Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671, upload-time = "2025-05-18T19:04:10.98Z" }, - { url = "https://files.pythonhosted.org/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864, upload-time = "2025-05-18T19:04:12.722Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989, upload-time = "2025-05-18T19:04:14.261Z" }, - { url = "https://files.pythonhosted.org/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495, upload-time = "2025-05-18T19:04:15.603Z" }, - { url = "https://files.pythonhosted.org/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289, upload-time = "2025-05-18T19:04:17.541Z" }, - { url = "https://files.pythonhosted.org/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074, upload-time = "2025-05-18T19:04:19.21Z" }, - { url = "https://files.pythonhosted.org/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225, upload-time = "2025-05-18T19:04:20.583Z" }, - { url = "https://files.pythonhosted.org/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235, upload-time = "2025-05-18T19:04:22.363Z" }, - { url = "https://files.pythonhosted.org/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278, upload-time = "2025-05-18T19:04:23.627Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866, upload-time = "2025-05-18T19:04:24.891Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772, upload-time = "2025-05-18T19:04:26.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534, upload-time = "2025-05-18T19:04:27.495Z" }, - { url = "https://files.pythonhosted.org/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087, upload-time = "2025-05-18T19:04:28.896Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694, upload-time = "2025-05-18T19:04:30.183Z" }, - { url = "https://files.pythonhosted.org/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992, upload-time = "2025-05-18T19:04:32.028Z" }, - { url = "https://files.pythonhosted.org/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723, upload-time = "2025-05-18T19:04:33.467Z" }, - { url = "https://files.pythonhosted.org/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215, upload-time = "2025-05-18T19:04:34.827Z" }, - { url = "https://files.pythonhosted.org/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762, upload-time = "2025-05-18T19:04:36.19Z" }, - { url = "https://files.pythonhosted.org/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427, upload-time = "2025-05-18T19:04:37.544Z" }, - { url = "https://files.pythonhosted.org/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127, upload-time = "2025-05-18T19:04:38.837Z" }, - { url = "https://files.pythonhosted.org/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527, upload-time = "2025-05-18T19:04:40.612Z" }, - { url = "https://files.pythonhosted.org/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213, upload-time = "2025-05-18T19:04:41.894Z" }, +version = 
"0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, 
upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = 
"2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021, upload-time = "2025-09-15T09:19:43.523Z" }, + { url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384, upload-time = "2025-09-15T09:19:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389, upload-time = "2025-09-15T09:19:46.094Z" }, + { url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519, upload-time = "2025-09-15T09:19:47.494Z" }, + { url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198, upload-time = "2025-09-15T09:19:49.116Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835, upload-time = "2025-09-15T09:19:50.468Z" }, + { url = "https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655, upload-time = "2025-09-15T09:19:51.726Z" }, + { url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135, upload-time = "2025-09-15T09:19:53.075Z" }, + { url 
= "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063, upload-time = "2025-09-15T09:19:54.447Z" }, + { url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139, upload-time = "2025-09-15T09:19:55.764Z" }, + { url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369, upload-time = "2025-09-15T09:19:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538, upload-time = "2025-09-15T09:19:58.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737, upload-time = "2025-09-15T09:19:59.638Z" }, + { url = "https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183, upload-time = "2025-09-15T09:20:01.442Z" }, + { url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225, upload-time = "2025-09-15T09:20:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/ee/3b/e7f45be7d3969bdf2e3cd4b816a7a1d272507cd0edd2d6dc4b07514f2d9a/jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72", size = 304414, upload-time = "2025-09-15T09:20:04.357Z" }, + { url = "https://files.pythonhosted.org/packages/06/32/13e8e0d152631fcc1907ceb4943711471be70496d14888ec6e92034e2caf/jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774", size = 314223, upload-time = "2025-09-15T09:20:05.631Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7e/abedd5b5a20ca083f778d96bba0d2366567fcecb0e6e34ff42640d5d7a18/jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0", size = 337306, upload-time = "2025-09-15T09:20:06.917Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/30d59bdc1204c86aa975ec72c48c482fee6633120ee9c3ab755e4dfefea8/jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a", size = 360565, upload-time = "2025-09-15T09:20:08.283Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/88/567288e0d2ed9fa8f7a3b425fdaf2cb82b998633c24fe0d98f5417321aa8/jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773", size = 486465, upload-time = "2025-09-15T09:20:09.613Z" }, + { url = "https://files.pythonhosted.org/packages/18/6e/7b72d09273214cadd15970e91dd5ed9634bee605176107db21e1e4205eb1/jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7", size = 377581, upload-time = "2025-09-15T09:20:10.884Z" }, + { url = "https://files.pythonhosted.org/packages/58/52/4db456319f9d14deed325f70102577492e9d7e87cf7097bda9769a1fcacb/jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2", size = 347102, upload-time = "2025-09-15T09:20:12.175Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b4/433d5703c38b26083aec7a733eb5be96f9c6085d0e270a87ca6482cbf049/jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2", size = 386477, upload-time = "2025-09-15T09:20:13.428Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7a/a60bfd9c55b55b07c5c441c5085f06420b6d493ce9db28d069cc5b45d9f3/jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0", size = 516004, upload-time = "2025-09-15T09:20:14.848Z" }, + { url = "https://files.pythonhosted.org/packages/2e/46/f8363e5ecc179b4ed0ca6cb0a6d3bfc266078578c71ff30642ea2ce2f203/jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73", size = 507855, upload-time = "2025-09-15T09:20:16.176Z" }, + { url = "https://files.pythonhosted.org/packages/90/33/396083357d51d7ff0f9805852c288af47480d30dd31d8abc74909b020761/jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2", size = 205802, upload-time = "2025-09-15T09:20:17.661Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/eb06ca556b2551d41de7d03bf2ee24285fa3d0c58c5f8d95c64c9c3281b1/jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40", size = 313405, upload-time = "2025-09-15T09:20:18.918Z" }, + { url = "https://files.pythonhosted.org/packages/af/22/7ab7b4ec3a1c1f03aef376af11d23b05abcca3fb31fbca1e7557053b1ba2/jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406", size = 347102, upload-time = "2025-09-15T09:20:20.16Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] @@ -734,9 +1074,66 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = 
"2022-06-17T18:00:10.251Z" }, ] +[[package]] +name = "json-repair" +version = "0.51.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/3a/f30f3c92da3a285dcbe469c50b058f2d349dc9a20fc1b60c3219befda53f/json_repair-0.51.0.tar.gz", hash = "sha256:487e00042d5bc5cc4897ea9c3cccd4f6641e926b732cc09f98691a832485098a", size = 35289, upload-time = "2025-09-19T04:23:16.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/fc/eb15e39547b29dbf2b786bbbd1e79e7f1d87ec4e7c9ea61786f093181481/json_repair-0.51.0-py3-none-any.whl", hash = "sha256:871f7651ee82abf72efc50a80d3a9af0ade8abf5b4541b418eeeabe4e677e314", size = 26263, upload-time = "2025-09-19T04:23:15.064Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "language-tags" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/7e/b6a0efe4fee11e9742c1baaedf7c574084238a70b03c1d8eb2761383848f/language_tags-1.2.0.tar.gz", hash = "sha256:e934acba3e3dc85f867703eca421847a9ab7b7679b11b5d5cfd096febbf8bde6", size = 207901, upload-time = "2023-01-11T18:38:07.893Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/42/327554649ed2dd5ce59d3f5da176c7be20f9352c7c6c51597293660b7b08/language_tags-1.2.0-py3-none-any.whl", hash = "sha256:d815604622242fdfbbfd747b40c31213617fd03734a267f2e39ee4bd73c88722", size = 213449, upload-time = "2023-01-11T18:38:05.692Z" }, +] + +[[package]] +name = "linkify-it-py" +version = "2.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "uc-micro-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946, upload-time = 
"2024-02-04T14:48:04.179Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" }, +] + [[package]] name = "logfire" -version = "3.16.1" +version = "4.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "executing" }, @@ -745,53 +1142,229 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "protobuf" }, { name = "rich" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/1d/ec4d24a12b3e96e19e9874170c63ebdd2bcc118370fb60dd86a88b758f0e/logfire-3.16.1.tar.gz", hash = "sha256:de91504243737cf161d4704a9980fbe3640f1e20c6df5f1948cb1cc559356a28", size = 477077, upload-time = "2025-05-26T12:08:47.597Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/67/53bc8c72ae2deac94fe9dc51b9bade27c3f378469cf02336ae22558f2f41/logfire-4.10.0.tar.gz", hash = "sha256:5c1021dac8258d78d5fd08a336a22027df432c42ba70e96eef6cac7d8476a67c", size = 540375, upload-time = "2025-09-24T17:57:17.078Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/1b/f0a5677c470184a342987ee6cfda539fdc0e8cfaffc3808c24f64f203d43/logfire-3.16.1-py3-none-any.whl", hash = "sha256:0622089e776294f54de31ede0c6cb23d4891f8f7e4bd4dbd89ee5fed8eb8c27f", size = 194633, upload-time = "2025-05-26T12:08:43.952Z" }, + { url = "https://files.pythonhosted.org/packages/4e/41/bbf361fd3a0576adbadd173492a22fcb1a194128df7609e728038a4a4f2d/logfire-4.10.0-py3-none-any.whl", hash = "sha256:54514b6253eea4c4e28f587b55508cdacbc75a423670bb5147fc2af70c16f5d3", size = 223648, upload-time = "2025-09-24T17:57:13.905Z" }, +] + +[package.optional-dependencies] +httpx = [ + { name = "opentelemetry-instrumentation-httpx" }, ] [[package]] name = "logfire-api" -version = "3.16.1" +version = "4.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/d5/1fde2adc24a2535faee363cdb5a8a15fe0c0cc542d1f731c37cd4689e258/logfire_api-3.16.1.tar.gz", hash = "sha256:b624927dd2da1f3ce7031434a3db61ecbbfecb94d1e2636b9eb616adde0dfeee", size = 48243, upload-time = "2025-05-26T12:08:49.334Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/25/fb38c0e3f216ee72cda4d856147846f588a9ff9a863c2a981403916c3921/logfire_api-4.10.0.tar.gz", hash = "sha256:a9bf635a7c565c57f7c8145c0e7ac24ac4d34d0fb82774310d9b89d4c6968b6d", size = 55768, upload-time = "2025-09-24T17:57:18.735Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/a4/8200b279a44990ad9d4233f05c2bc4029ba02f25de51fee61f51bc5c5a98/logfire_api-3.16.1-py3-none-any.whl", hash = "sha256:da0d232fffadded58339b91a5a1b5f45c4bd05a62e9241c973de9c5bebe34521", size = 80121, upload-time = "2025-05-26T12:08:46.108Z" }, + { url = "https://files.pythonhosted.org/packages/22/e8/4355d4909eb1f07bba1ecf7a9b99be8bbc356db828e60b750e41dbb49dab/logfire_api-4.10.0-py3-none-any.whl", hash = "sha256:20819b2f3b43a53b66a500725553bdd52ed8c74f2147aa128c5ba5aa58668059", size = 92694, upload-time = "2025-09-24T17:57:15.686Z" }, +] + +[[package]] +name = "lxml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", 
hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, + { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152, upload-time = "2025-09-22T04:00:51.709Z" }, + { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539, upload-time = "2025-09-22T04:00:53.593Z" }, + { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853, upload-time = "2025-09-22T04:00:55.524Z" }, + { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133, upload-time = "2025-09-22T04:00:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944, upload-time = "2025-09-22T04:00:59.052Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535, upload-time = "2025-09-22T04:01:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343, upload-time = "2025-09-22T04:01:03.13Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419, upload-time = "2025-09-22T04:01:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008, upload-time = "2025-09-22T04:01:07.327Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906, upload-time = "2025-09-22T04:01:09.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357, upload-time = "2025-09-22T04:01:11.102Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583, upload-time = "2025-09-22T04:01:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591, upload-time = "2025-09-22T04:01:14.874Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887, upload-time = "2025-09-22T04:01:17.265Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818, upload-time = "2025-09-22T04:01:19.688Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807, upload-time = "2025-09-22T04:01:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179, upload-time = "2025-09-22T04:01:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044, upload-time = "2025-09-22T04:01:25.118Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685, upload-time = "2025-09-22T04:01:27.398Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127, upload-time = "2025-09-22T04:01:29.629Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958, upload-time = "2025-09-22T04:01:31.535Z" }, + { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541, upload-time = "2025-09-22T04:01:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426, upload-time = "2025-09-22T04:01:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917, upload-time = "2025-09-22T04:01:37.448Z" }, + { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795, upload-time = "2025-09-22T04:01:39.165Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759, upload-time = "2025-09-22T04:01:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666, upload-time = "2025-09-22T04:01:43.363Z" }, + { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989, upload-time = "2025-09-22T04:01:45.215Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456, upload-time = "2025-09-22T04:01:48.243Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793, upload-time = "2025-09-22T04:01:50.042Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836, upload-time = "2025-09-22T04:01:52.145Z" }, + { url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494, upload-time = "2025-09-22T04:01:54.242Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146, upload-time = "2025-09-22T04:01:56.282Z" }, + { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932, upload-time = "2025-09-22T04:01:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060, upload-time = "2025-09-22T04:02:00.812Z" }, + { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000, upload-time = "2025-09-22T04:02:02.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496, upload-time = "2025-09-22T04:02:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779, upload-time = "2025-09-22T04:02:06.689Z" }, + { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072, upload-time = "2025-09-22T04:02:08.587Z" }, + { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675, upload-time = "2025-09-22T04:02:10.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171, upload-time = "2025-09-22T04:02:12.631Z" }, + { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175, upload-time = "2025-09-22T04:02:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688, upload-time = "2025-09-22T04:02:16.957Z" }, + { url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655, upload-time = "2025-09-22T04:02:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695, upload-time = "2025-09-22T04:02:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841, upload-time = "2025-09-22T04:02:22.489Z" }, + { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700, upload-time = "2025-09-22T04:02:24.465Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347, upload-time = "2025-09-22T04:02:26.286Z" }, + { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248, upload-time = "2025-09-22T04:02:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801, upload-time = "2025-09-22T04:02:30.113Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403, upload-time = "2025-09-22T04:02:32.119Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974, upload-time = "2025-09-22T04:02:34.155Z" }, + { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953, upload-time = "2025-09-22T04:02:36.054Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054, upload-time = "2025-09-22T04:02:38.154Z" }, + { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421, upload-time = "2025-09-22T04:02:40.413Z" }, + { url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684, upload-time = "2025-09-22T04:02:42.288Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463, upload-time = "2025-09-22T04:02:44.165Z" }, + { url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437, upload-time = "2025-09-22T04:02:46.524Z" }, + { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890, upload-time = "2025-09-22T04:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185, upload-time = "2025-09-22T04:02:50.746Z" }, + { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895, upload-time = "2025-09-22T04:02:52.968Z" }, + { url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 5695246, upload-time = 
"2025-09-22T04:02:54.798Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797, upload-time = "2025-09-22T04:02:57.058Z" }, + { url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404, upload-time = "2025-09-22T04:02:58.966Z" }, + { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072, upload-time = "2025-09-22T04:03:38.05Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617, upload-time = "2025-09-22T04:03:39.835Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930, upload-time = "2025-09-22T04:03:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380, upload-time = "2025-09-22T04:03:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", size = 4775632, upload-time = "2025-09-22T04:03:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171, upload-time = "2025-09-22T04:03:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109, upload-time = "2025-09-22T04:03:07.452Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061, upload-time = "2025-09-22T04:03:09.297Z" }, + { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233, upload-time = "2025-09-22T04:03:11.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739, upload-time = "2025-09-22T04:03:13.592Z" }, + { url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119, upload-time = "2025-09-22T04:03:15.408Z" }, + { url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665, upload-time = "2025-09-22T04:03:17.262Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997, upload-time = "2025-09-22T04:03:19.14Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957, upload-time = "2025-09-22T04:03:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372, upload-time = "2025-09-22T04:03:23.27Z" }, + { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 5634653, upload-time = "2025-09-22T04:03:25.767Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795, upload-time = "2025-09-22T04:03:27.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023, upload-time = "2025-09-22T04:03:30.056Z" }, + { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420, upload-time = "2025-09-22T04:03:32.198Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837, upload-time = "2025-09-22T04:03:34.027Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205, upload-time = "2025-09-22T04:03:36.249Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = "2025-09-22T04:04:49.907Z" }, + { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119, upload-time = "2025-09-22T04:04:51.801Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314, upload-time = "2025-09-22T04:04:55.024Z" }, + { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768, upload-time = "2025-09-22T04:04:57.097Z" }, ] [[package]] name = "markdown-it-py" -version = "3.0.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = 
"sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[package.optional-dependencies] +linkify = [ + { name = "linkify-it-py" }, +] +plugins = [ + { name = "mdit-py-plugins" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = 
"2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] [[package]] name = "mcp" -version = "1.9.1" +version = "1.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "jsonschema" }, { name = "pydantic" }, { name = "pydantic-settings" }, { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/bc/54aec2c334698cc575ca3b3481eed627125fb66544152fa1af927b1a495c/mcp-1.9.1.tar.gz", hash = 
"sha256:19879cd6dde3d763297617242888c2f695a95dfa854386a6a68676a646ce75e4", size = 316247, upload-time = "2025-05-22T15:52:21.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/48/e9/242096400d702924b49f8d202c6ded7efb8841cacba826b5d2e6183aef7b/mcp-1.14.1.tar.gz", hash = "sha256:31c4406182ba15e8f30a513042719c3f0a38c615e76188ee5a736aaa89e20134", size = 454944, upload-time = "2025-09-18T13:37:19.971Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/11/d334fbb7c2aeddd2e762b86d7a619acffae012643a5738e698f975a2a9e2/mcp-1.14.1-py3-none-any.whl", hash = "sha256:3b7a479e8e5cbf5361bdc1da8bc6d500d795dc3aff44b44077a363a7f7e945a4", size = 163809, upload-time = "2025-09-18T13:37:18.165Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/c0/4ac795585a22a0a2d09cd2b1187b0252d2afcdebd01e10a68bbac4d34890/mcp-1.9.1-py3-none-any.whl", hash = "sha256:2900ded8ffafc3c8a7bfcfe8bc5204037e988e753ec398f371663e6a06ecd9a9", size = 130261, upload-time = "2025-05-22T15:52:19.702Z" }, + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, ] [[package]] @@ -805,23 +1378,237 @@ wheels = [ [[package]] name = "mistralai" -version = "1.7.1" +version = "1.9.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "eval-type-backport" }, { name = "httpx" }, + { name = "invoke" }, { name = "pydantic" }, { name = "python-dateutil" }, + { name = "pyyaml" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/34/b819d228f4df173c1bfd42936c2c749f41a13ae0796d03cd55f955426842/mistralai-1.7.1.tar.gz", hash = "sha256:a0cd4632c8aad6d8b90f77713c4049185626ac9b2a0d82484407beef1a9d16f3", size = 142373, upload-time = "2025-05-22T15:08:18.247Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/ea/bc40e3c8cf6ac5672eae503601b1f8b766085a9cf07c2e45de4b0481c91f/mistralai-1.7.1-py3-none-any.whl", hash = "sha256:2ca97f9c2adac9509578e8b141a1875bee1d966a8dde4d90ffc05f1b904b0421", size = 302285, upload-time = "2025-05-22T15:08:16.718Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/6d/a3/1ae43c9db1fc612176d5d3418c12cd363852e954c5d12bf3a4477de2e4a6/mistralai-1.9.10.tar.gz", hash = "sha256:a95721276f035bf86c7fdc1373d7fb7d056d83510226f349426e0d522c0c0965", size = 205043, upload-time = "2025-09-02T07:44:38.859Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/40/646448b5ad66efec097471bd5ab25f5b08360e3f34aecbe5c4fcc6845c01/mistralai-1.9.10-py3-none-any.whl", hash = "sha256:cf0a2906e254bb4825209a26e1957e6e0bacbbe61875bd22128dc3d5d51a7b0a", size = 440538, upload-time = "2025-09-02T07:44:37.5Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/45/b1/ea4f68038a18c77c9467400d166d74c4ffa536f34761f7983a104357e614/msgpack-1.1.1.tar.gz", hash = "sha256:77b79ce34a2bdab2594f490c8e80dd62a02d650b91a75159a63ec413b8d104cd", size = 173555, upload-time = "2025-06-13T06:52:51.324Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/83/97f24bf9848af23fe2ba04380388216defc49a8af6da0c28cc636d722502/msgpack-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71ef05c1726884e44f8b1d1773604ab5d4d17729d8491403a705e649116c9558", size = 82728, upload-time = "2025-06-13T06:51:50.68Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/2eaa388267a78401f6e182662b08a588ef4f3de6f0eab1ec09736a7aaa2b/msgpack-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:36043272c6aede309d29d56851f8841ba907a1a3d04435e43e8a19928e243c1d", size = 79279, upload-time = "2025-06-13T06:51:51.72Z" }, + { url = "https://files.pythonhosted.org/packages/f8/46/31eb60f4452c96161e4dfd26dbca562b4ec68c72e4ad07d9566d7ea35e8a/msgpack-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a32747b1b39c3ac27d0670122b57e6e57f28eefb725e0b625618d1b59bf9d1e0", size = 423859, upload-time = "2025-06-13T06:51:52.749Z" }, + { url = "https://files.pythonhosted.org/packages/45/16/a20fa8c32825cc7ae8457fab45670c7a8996d7746ce80ce41cc51e3b2bd7/msgpack-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a8b10fdb84a43e50d38057b06901ec9da52baac6983d3f709d8507f3889d43f", size = 429975, upload-time = "2025-06-13T06:51:53.97Z" }, + { url = "https://files.pythonhosted.org/packages/86/ea/6c958e07692367feeb1a1594d35e22b62f7f476f3c568b002a5ea09d443d/msgpack-1.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0c325c3f485dc54ec298d8b024e134acf07c10d494ffa24373bea729acf704", size = 413528, upload-time = "2025-06-13T06:51:55.507Z" }, + { url = "https://files.pythonhosted.org/packages/75/05/ac84063c5dae79722bda9f68b878dc31fc3059adb8633c79f1e82c2cd946/msgpack-1.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:88daaf7d146e48ec71212ce21109b66e06a98e5e44dca47d853cbfe171d6c8d2", size = 413338, upload-time = "2025-06-13T06:51:57.023Z" }, + { url = "https://files.pythonhosted.org/packages/69/e8/fe86b082c781d3e1c09ca0f4dacd457ede60a13119b6ce939efe2ea77b76/msgpack-1.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8b55ea20dc59b181d3f47103f113e6f28a5e1c89fd5b67b9140edb442ab67f2", size = 422658, upload-time = "2025-06-13T06:51:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2b/bafc9924df52d8f3bb7c00d24e57be477f4d0f967c0a31ef5e2225e035c7/msgpack-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a28e8072ae9779f20427af07f53bbb8b4aa81151054e882aee333b158da8752", size = 427124, upload-time = "2025-06-13T06:51:59.969Z" }, + { url = "https://files.pythonhosted.org/packages/a2/3b/1f717e17e53e0ed0b68fa59e9188f3f610c79d7151f0e52ff3cd8eb6b2dc/msgpack-1.1.1-cp311-cp311-win32.whl", hash = "sha256:7da8831f9a0fdb526621ba09a281fadc58ea12701bc709e7b8cbc362feabc295", size = 65016, upload-time = "2025-06-13T06:52:01.294Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/9d1780768d3b249accecc5a38c725eb1e203d44a191f7b7ff1941f7df60c/msgpack-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fd1b58e1431008a57247d6e7cc4faa41c3607e8e7d4aaf81f7c29ea013cb458", size = 72267, upload-time = "2025-06-13T06:52:02.568Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/26/389b9c593eda2b8551b2e7126ad3a06af6f9b44274eb3a4f054d48ff7e47/msgpack-1.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae497b11f4c21558d95de9f64fff7053544f4d1a17731c866143ed6bb4591238", size = 82359, upload-time = "2025-06-13T06:52:03.909Z" }, + { url = "https://files.pythonhosted.org/packages/ab/65/7d1de38c8a22cf8b1551469159d4b6cf49be2126adc2482de50976084d78/msgpack-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33be9ab121df9b6b461ff91baac6f2731f83d9b27ed948c5b9d1978ae28bf157", size = 79172, upload-time = "2025-06-13T06:52:05.246Z" }, + { url = "https://files.pythonhosted.org/packages/0f/bd/cacf208b64d9577a62c74b677e1ada005caa9b69a05a599889d6fc2ab20a/msgpack-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f64ae8fe7ffba251fecb8408540c34ee9df1c26674c50c4544d72dbf792e5ce", size = 425013, upload-time = "2025-06-13T06:52:06.341Z" }, + { url = "https://files.pythonhosted.org/packages/4d/ec/fd869e2567cc9c01278a736cfd1697941ba0d4b81a43e0aa2e8d71dab208/msgpack-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a494554874691720ba5891c9b0b39474ba43ffb1aaf32a5dac874effb1619e1a", size = 426905, upload-time = "2025-06-13T06:52:07.501Z" }, + { url = "https://files.pythonhosted.org/packages/55/2a/35860f33229075bce803a5593d046d8b489d7ba2fc85701e714fc1aaf898/msgpack-1.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb643284ab0ed26f6957d969fe0dd8bb17beb567beb8998140b5e38a90974f6c", size = 407336, upload-time = "2025-06-13T06:52:09.047Z" }, + { url = "https://files.pythonhosted.org/packages/8c/16/69ed8f3ada150bf92745fb4921bd621fd2cdf5a42e25eb50bcc57a5328f0/msgpack-1.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d275a9e3c81b1093c060c3837e580c37f47c51eca031f7b5fb76f7b8470f5f9b", size = 409485, upload-time = "2025-06-13T06:52:10.382Z" }, + { url = "https://files.pythonhosted.org/packages/c6/b6/0c398039e4c6d0b2e37c61d7e0e9d13439f91f780686deb8ee64ecf1ae71/msgpack-1.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fd6b577e4541676e0cc9ddc1709d25014d3ad9a66caa19962c4f5de30fc09ef", size = 412182, upload-time = "2025-06-13T06:52:11.644Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d0/0cf4a6ecb9bc960d624c93effaeaae75cbf00b3bc4a54f35c8507273cda1/msgpack-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb29aaa613c0a1c40d1af111abf025f1732cab333f96f285d6a93b934738a68a", size = 419883, upload-time = "2025-06-13T06:52:12.806Z" }, + { url = "https://files.pythonhosted.org/packages/62/83/9697c211720fa71a2dfb632cad6196a8af3abea56eece220fde4674dc44b/msgpack-1.1.1-cp312-cp312-win32.whl", hash = "sha256:870b9a626280c86cff9c576ec0d9cbcc54a1e5ebda9cd26dab12baf41fee218c", size = 65406, upload-time = "2025-06-13T06:52:14.271Z" }, + { url = "https://files.pythonhosted.org/packages/c0/23/0abb886e80eab08f5e8c485d6f13924028602829f63b8f5fa25a06636628/msgpack-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:5692095123007180dca3e788bb4c399cc26626da51629a31d40207cb262e67f4", size = 72558, upload-time = "2025-06-13T06:52:15.252Z" }, + { url = "https://files.pythonhosted.org/packages/a1/38/561f01cf3577430b59b340b51329803d3a5bf6a45864a55f4ef308ac11e3/msgpack-1.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3765afa6bd4832fc11c3749be4ba4b69a0e8d7b728f78e68120a157a4c5d41f0", size = 81677, upload-time = "2025-06-13T06:52:16.64Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/48/54a89579ea36b6ae0ee001cba8c61f776451fad3c9306cd80f5b5c55be87/msgpack-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8ddb2bcfd1a8b9e431c8d6f4f7db0773084e107730ecf3472f1dfe9ad583f3d9", size = 78603, upload-time = "2025-06-13T06:52:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/daba2699b308e95ae792cdc2ef092a38eb5ee422f9d2fbd4101526d8a210/msgpack-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:196a736f0526a03653d829d7d4c5500a97eea3648aebfd4b6743875f28aa2af8", size = 420504, upload-time = "2025-06-13T06:52:18.982Z" }, + { url = "https://files.pythonhosted.org/packages/20/22/2ebae7ae43cd8f2debc35c631172ddf14e2a87ffcc04cf43ff9df9fff0d3/msgpack-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d592d06e3cc2f537ceeeb23d38799c6ad83255289bb84c2e5792e5a8dea268a", size = 423749, upload-time = "2025-06-13T06:52:20.211Z" }, + { url = "https://files.pythonhosted.org/packages/40/1b/54c08dd5452427e1179a40b4b607e37e2664bca1c790c60c442c8e972e47/msgpack-1.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4df2311b0ce24f06ba253fda361f938dfecd7b961576f9be3f3fbd60e87130ac", size = 404458, upload-time = "2025-06-13T06:52:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/2e/60/6bb17e9ffb080616a51f09928fdd5cac1353c9becc6c4a8abd4e57269a16/msgpack-1.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4141c5a32b5e37905b5940aacbc59739f036930367d7acce7a64e4dec1f5e0b", size = 405976, upload-time = "2025-06-13T06:52:22.995Z" }, + { url = "https://files.pythonhosted.org/packages/ee/97/88983e266572e8707c1f4b99c8fd04f9eb97b43f2db40e3172d87d8642db/msgpack-1.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b1ce7f41670c5a69e1389420436f41385b1aa2504c3b0c30620764b15dded2e7", size = 408607, upload-time = "2025-06-13T06:52:24.152Z" }, + { url = "https://files.pythonhosted.org/packages/bc/66/36c78af2efaffcc15a5a61ae0df53a1d025f2680122e2a9eb8442fed3ae4/msgpack-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4147151acabb9caed4e474c3344181e91ff7a388b888f1e19ea04f7e73dc7ad5", size = 424172, upload-time = "2025-06-13T06:52:25.704Z" }, + { url = "https://files.pythonhosted.org/packages/8c/87/a75eb622b555708fe0427fab96056d39d4c9892b0c784b3a721088c7ee37/msgpack-1.1.1-cp313-cp313-win32.whl", hash = "sha256:500e85823a27d6d9bba1d057c871b4210c1dd6fb01fbb764e37e4e8847376323", size = 65347, upload-time = "2025-06-13T06:52:26.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/91/7dc28d5e2a11a5ad804cf2b7f7a5fcb1eb5a4966d66a5d2b41aee6376543/msgpack-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:6d489fba546295983abd142812bda76b57e33d0b9f5d5b71c09a583285506f69", size = 72341, upload-time = "2025-06-13T06:52:27.835Z" }, +] + +[[package]] +name = "multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, 
upload-time = "2025-08-11T12:06:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, + { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, + { url = "https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, + { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, + { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, + { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, + { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 41306, upload-time = "2025-08-11T12:06:49.95Z" }, + { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, + { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, + { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, + { url = "https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, + { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, + { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, + { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, + { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, + { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, + { url = "https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, + { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, + { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, + { url = "https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, +] + +[[package]] +name = "nexus-rpc" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/66/540687556bd28cf1ec370cc6881456203dfddb9dab047b8979c6865b5984/nexus_rpc-1.1.0.tar.gz", hash = "sha256:d65ad6a2f54f14e53ebe39ee30555eaeb894102437125733fb13034a04a44553", size = 77383, upload-time = "2025-07-07T19:03:58.368Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/2f/9e9d0dcaa4c6ffa22b7aa31069a8a264c753ff8027b36af602cce038c92f/nexus_rpc-1.1.0-py3-none-any.whl", hash = "sha256:d1b007af2aba186a27e736f8eaae39c03aed05b488084ff6c3d1785c9ba2ad38", size = 27743, upload-time = "2025-07-07T19:03:57.556Z" }, +] + +[[package]] +name = "numpy" 
+version = "2.3.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648, upload-time = "2025-09-09T16:54:12.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/45/e80d203ef6b267aa29b22714fb558930b27960a0c5ce3c19c999232bb3eb/numpy-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ffc4f5caba7dfcbe944ed674b7eef683c7e94874046454bb79ed7ee0236f59d", size = 21259253, upload-time = "2025-09-09T15:56:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/52/18/cf2c648fccf339e59302e00e5f2bc87725a3ce1992f30f3f78c9044d7c43/numpy-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7e946c7170858a0295f79a60214424caac2ffdb0063d4d79cb681f9aa0aa569", size = 14450980, upload-time = "2025-09-09T15:56:05.926Z" }, + { url = "https://files.pythonhosted.org/packages/93/fb/9af1082bec870188c42a1c239839915b74a5099c392389ff04215dcee812/numpy-2.3.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:cd4260f64bc794c3390a63bf0728220dd1a68170c169088a1e0dfa2fde1be12f", size = 5379709, upload-time = "2025-09-09T15:56:07.95Z" }, + { url = "https://files.pythonhosted.org/packages/75/0f/bfd7abca52bcbf9a4a65abc83fe18ef01ccdeb37bfb28bbd6ad613447c79/numpy-2.3.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:f0ddb4b96a87b6728df9362135e764eac3cfa674499943ebc44ce96c478ab125", size = 6913923, upload-time = "2025-09-09T15:56:09.443Z" }, + { url = "https://files.pythonhosted.org/packages/79/55/d69adad255e87ab7afda1caf93ca997859092afeb697703e2f010f7c2e55/numpy-2.3.3-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afd07d377f478344ec6ca2b8d4ca08ae8bd44706763d1efb56397de606393f48", size = 14589591, upload-time = "2025-09-09T15:56:11.234Z" }, + { url = "https://files.pythonhosted.org/packages/10/a2/010b0e27ddeacab7839957d7a8f00e91206e0c2c47abbb5f35a2630e5387/numpy-2.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bc92a5dedcc53857249ca51ef29f5e5f2f8c513e22cfb90faeb20343b8c6f7a6", size = 16938714, upload-time = "2025-09-09T15:56:14.637Z" }, + { url = "https://files.pythonhosted.org/packages/1c/6b/12ce8ede632c7126eb2762b9e15e18e204b81725b81f35176eac14dc5b82/numpy-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7af05ed4dc19f308e1d9fc759f36f21921eb7bbfc82843eeec6b2a2863a0aefa", size = 16370592, upload-time = "2025-09-09T15:56:17.285Z" }, + { url = "https://files.pythonhosted.org/packages/b4/35/aba8568b2593067bb6a8fe4c52babb23b4c3b9c80e1b49dff03a09925e4a/numpy-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433bf137e338677cebdd5beac0199ac84712ad9d630b74eceeb759eaa45ddf30", size = 18884474, upload-time = "2025-09-09T15:56:20.943Z" }, + { url = "https://files.pythonhosted.org/packages/45/fa/7f43ba10c77575e8be7b0138d107e4f44ca4a1ef322cd16980ea3e8b8222/numpy-2.3.3-cp311-cp311-win32.whl", hash = "sha256:eb63d443d7b4ffd1e873f8155260d7f58e7e4b095961b01c91062935c2491e57", size = 6599794, upload-time = "2025-09-09T15:56:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/0a/a2/a4f78cb2241fe5664a22a10332f2be886dcdea8784c9f6a01c272da9b426/numpy-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:ec9d249840f6a565f58d8f913bccac2444235025bbb13e9a4681783572ee3caa", size = 13088104, upload-time = "2025-09-09T15:56:25.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/64/e424e975adbd38282ebcd4891661965b78783de893b381cbc4832fb9beb2/numpy-2.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:74c2a948d02f88c11a3c075d9733f1ae67d97c6bdb97f2bb542f980458b257e7", size = 10460772, upload-time = "2025-09-09T15:56:27.679Z" }, + { url = "https://files.pythonhosted.org/packages/51/5d/bb7fc075b762c96329147799e1bcc9176ab07ca6375ea976c475482ad5b3/numpy-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cfdd09f9c84a1a934cde1eec2267f0a43a7cd44b2cca4ff95b7c0d14d144b0bf", size = 20957014, upload-time = "2025-09-09T15:56:29.966Z" }, + { url = "https://files.pythonhosted.org/packages/6b/0e/c6211bb92af26517acd52125a237a92afe9c3124c6a68d3b9f81b62a0568/numpy-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb32e3cf0f762aee47ad1ddc6672988f7f27045b0783c887190545baba73aa25", size = 14185220, upload-time = "2025-09-09T15:56:32.175Z" }, + { url = "https://files.pythonhosted.org/packages/22/f2/07bb754eb2ede9073f4054f7c0286b0d9d2e23982e090a80d478b26d35ca/numpy-2.3.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:396b254daeb0a57b1fe0ecb5e3cff6fa79a380fa97c8f7781a6d08cd429418fe", size = 5113918, upload-time = "2025-09-09T15:56:34.175Z" }, + { url = "https://files.pythonhosted.org/packages/81/0a/afa51697e9fb74642f231ea36aca80fa17c8fb89f7a82abd5174023c3960/numpy-2.3.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:067e3d7159a5d8f8a0b46ee11148fc35ca9b21f61e3c49fbd0a027450e65a33b", size = 6647922, upload-time = "2025-09-09T15:56:36.149Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f5/122d9cdb3f51c520d150fef6e87df9279e33d19a9611a87c0d2cf78a89f4/numpy-2.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c02d0629d25d426585fb2e45a66154081b9fa677bc92a881ff1d216bc9919a8", size = 14281991, upload-time = "2025-09-09T15:56:40.548Z" }, + { url = "https://files.pythonhosted.org/packages/51/64/7de3c91e821a2debf77c92962ea3fe6ac2bc45d0778c1cbe15d4fce2fd94/numpy-2.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9192da52b9745f7f0766531dcfa978b7763916f158bb63bdb8a1eca0068ab20", size = 16641643, upload-time = "2025-09-09T15:56:43.343Z" }, + { url = "https://files.pythonhosted.org/packages/30/e4/961a5fa681502cd0d68907818b69f67542695b74e3ceaa513918103b7e80/numpy-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd7de500a5b66319db419dc3c345244404a164beae0d0937283b907d8152e6ea", size = 16056787, upload-time = "2025-09-09T15:56:46.141Z" }, + { url = "https://files.pythonhosted.org/packages/99/26/92c912b966e47fbbdf2ad556cb17e3a3088e2e1292b9833be1dfa5361a1a/numpy-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93d4962d8f82af58f0b2eb85daaf1b3ca23fe0a85d0be8f1f2b7bb46034e56d7", size = 18579598, upload-time = "2025-09-09T15:56:49.844Z" }, + { url = "https://files.pythonhosted.org/packages/17/b6/fc8f82cb3520768718834f310c37d96380d9dc61bfdaf05fe5c0b7653e01/numpy-2.3.3-cp312-cp312-win32.whl", hash = "sha256:5534ed6b92f9b7dca6c0a19d6df12d41c68b991cef051d108f6dbff3babc4ebf", size = 6320800, upload-time = "2025-09-09T15:56:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/32/ee/de999f2625b80d043d6d2d628c07d0d5555a677a3cf78fdf868d409b8766/numpy-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:497d7cad08e7092dba36e3d296fe4c97708c93daf26643a1ae4b03f6294d30eb", size = 12786615, upload-time = "2025-09-09T15:56:54.422Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/6e/b479032f8a43559c383acb20816644f5f91c88f633d9271ee84f3b3a996c/numpy-2.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:ca0309a18d4dfea6fc6262a66d06c26cfe4640c3926ceec90e57791a82b6eee5", size = 10195936, upload-time = "2025-09-09T15:56:56.541Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b9/984c2b1ee61a8b803bf63582b4ac4242cf76e2dbd663efeafcb620cc0ccb/numpy-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f5415fb78995644253370985342cd03572ef8620b934da27d77377a2285955bf", size = 20949588, upload-time = "2025-09-09T15:56:59.087Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e4/07970e3bed0b1384d22af1e9912527ecbeb47d3b26e9b6a3bced068b3bea/numpy-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d00de139a3324e26ed5b95870ce63be7ec7352171bc69a4cf1f157a48e3eb6b7", size = 14177802, upload-time = "2025-09-09T15:57:01.73Z" }, + { url = "https://files.pythonhosted.org/packages/35/c7/477a83887f9de61f1203bad89cf208b7c19cc9fef0cebef65d5a1a0619f2/numpy-2.3.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9dc13c6a5829610cc07422bc74d3ac083bd8323f14e2827d992f9e52e22cd6a6", size = 5106537, upload-time = "2025-09-09T15:57:03.765Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/93b953bd5866a6f6986344d045a207d3f1cfbad99db29f534ea9cee5108c/numpy-2.3.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d79715d95f1894771eb4e60fb23f065663b2298f7d22945d66877aadf33d00c7", size = 6640743, upload-time = "2025-09-09T15:57:07.921Z" }, + { url = "https://files.pythonhosted.org/packages/23/83/377f84aaeb800b64c0ef4de58b08769e782edcefa4fea712910b6f0afd3c/numpy-2.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952cfd0748514ea7c3afc729a0fc639e61655ce4c55ab9acfab14bda4f402b4c", size = 14278881, upload-time = "2025-09-09T15:57:11.349Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a5/bf3db6e66c4b160d6ea10b534c381a1955dfab34cb1017ea93aa33c70ed3/numpy-2.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b83648633d46f77039c29078751f80da65aa64d5622a3cd62aaef9d835b6c93", size = 16636301, upload-time = "2025-09-09T15:57:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/1287924242eb4fa3f9b3a2c30400f2e17eb2707020d1c5e3086fe7330717/numpy-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b001bae8cea1c7dfdb2ae2b017ed0a6f2102d7a70059df1e338e307a4c78a8ae", size = 16053645, upload-time = "2025-09-09T15:57:16.534Z" }, + { url = "https://files.pythonhosted.org/packages/e6/93/b3d47ed882027c35e94ac2320c37e452a549f582a5e801f2d34b56973c97/numpy-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e9aced64054739037d42fb84c54dd38b81ee238816c948c8f3ed134665dcd86", size = 18578179, upload-time = "2025-09-09T15:57:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/487a2bccbf7cc9d4bfc5f0f197761a5ef27ba870f1e3bbb9afc4bbe3fcc2/numpy-2.3.3-cp313-cp313-win32.whl", hash = "sha256:9591e1221db3f37751e6442850429b3aabf7026d3b05542d102944ca7f00c8a8", size = 6312250, upload-time = "2025-09-09T15:57:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b5/263ebbbbcede85028f30047eab3d58028d7ebe389d6493fc95ae66c636ab/numpy-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f0dadeb302887f07431910f67a14d57209ed91130be0adea2f9793f1a4f817cf", size = 12783269, upload-time = "2025-09-09T15:57:23.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/75/67b8ca554bbeaaeb3fac2e8bce46967a5a06544c9108ec0cf5cece559b6c/numpy-2.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:3c7cf302ac6e0b76a64c4aecf1a09e51abd9b01fc7feee80f6c43e3ab1b1dbc5", size = 10195314, upload-time = "2025-09-09T15:57:25.045Z" }, + { url = "https://files.pythonhosted.org/packages/11/d0/0d1ddec56b162042ddfafeeb293bac672de9b0cfd688383590090963720a/numpy-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:eda59e44957d272846bb407aad19f89dc6f58fecf3504bd144f4c5cf81a7eacc", size = 21048025, upload-time = "2025-09-09T15:57:27.257Z" }, + { url = "https://files.pythonhosted.org/packages/36/9e/1996ca6b6d00415b6acbdd3c42f7f03ea256e2c3f158f80bd7436a8a19f3/numpy-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:823d04112bc85ef5c4fda73ba24e6096c8f869931405a80aa8b0e604510a26bc", size = 14301053, upload-time = "2025-09-09T15:57:30.077Z" }, + { url = "https://files.pythonhosted.org/packages/05/24/43da09aa764c68694b76e84b3d3f0c44cb7c18cdc1ba80e48b0ac1d2cd39/numpy-2.3.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:40051003e03db4041aa325da2a0971ba41cf65714e65d296397cc0e32de6018b", size = 5229444, upload-time = "2025-09-09T15:57:32.733Z" }, + { url = "https://files.pythonhosted.org/packages/bc/14/50ffb0f22f7218ef8af28dd089f79f68289a7a05a208db9a2c5dcbe123c1/numpy-2.3.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ee9086235dd6ab7ae75aba5662f582a81ced49f0f1c6de4260a78d8f2d91a19", size = 6738039, upload-time = "2025-09-09T15:57:34.328Z" }, + { url = "https://files.pythonhosted.org/packages/55/52/af46ac0795e09657d45a7f4db961917314377edecf66db0e39fa7ab5c3d3/numpy-2.3.3-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:94fcaa68757c3e2e668ddadeaa86ab05499a70725811e582b6a9858dd472fb30", size = 14352314, upload-time = "2025-09-09T15:57:36.255Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b1/dc226b4c90eb9f07a3fff95c2f0db3268e2e54e5cce97c4ac91518aee71b/numpy-2.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da1a74b90e7483d6ce5244053399a614b1d6b7bc30a60d2f570e5071f8959d3e", size = 16701722, upload-time = "2025-09-09T15:57:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/9d/9d/9d8d358f2eb5eced14dba99f110d83b5cd9a4460895230f3b396ad19a323/numpy-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2990adf06d1ecee3b3dcbb4977dfab6e9f09807598d647f04d385d29e7a3c3d3", size = 16132755, upload-time = "2025-09-09T15:57:41.16Z" }, + { url = "https://files.pythonhosted.org/packages/b6/27/b3922660c45513f9377b3fb42240bec63f203c71416093476ec9aa0719dc/numpy-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ed635ff692483b8e3f0fcaa8e7eb8a75ee71aa6d975388224f70821421800cea", size = 18651560, upload-time = "2025-09-09T15:57:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8e/3ab61a730bdbbc201bb245a71102aa609f0008b9ed15255500a99cd7f780/numpy-2.3.3-cp313-cp313t-win32.whl", hash = "sha256:a333b4ed33d8dc2b373cc955ca57babc00cd6f9009991d9edc5ddbc1bac36bcd", size = 6442776, upload-time = "2025-09-09T15:57:45.793Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3a/e22b766b11f6030dc2decdeff5c2fb1610768055603f9f3be88b6d192fb2/numpy-2.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:4384a169c4d8f97195980815d6fcad04933a7e1ab3b530921c3fef7a1c63426d", size = 12927281, upload-time = "2025-09-09T15:57:47.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/42/c2e2bc48c5e9b2a83423f99733950fbefd86f165b468a3d85d52b30bf782/numpy-2.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:75370986cc0bc66f4ce5110ad35aae6d182cc4ce6433c40ad151f53690130bf1", size = 10265275, upload-time = "2025-09-09T15:57:49.647Z" }, + { url = "https://files.pythonhosted.org/packages/6b/01/342ad585ad82419b99bcf7cebe99e61da6bedb89e213c5fd71acc467faee/numpy-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cd052f1fa6a78dee696b58a914b7229ecfa41f0a6d96dc663c1220a55e137593", size = 20951527, upload-time = "2025-09-09T15:57:52.006Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d8/204e0d73fc1b7a9ee80ab1fe1983dd33a4d64a4e30a05364b0208e9a241a/numpy-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:414a97499480067d305fcac9716c29cf4d0d76db6ebf0bf3cbce666677f12652", size = 14186159, upload-time = "2025-09-09T15:57:54.407Z" }, + { url = "https://files.pythonhosted.org/packages/22/af/f11c916d08f3a18fb8ba81ab72b5b74a6e42ead4c2846d270eb19845bf74/numpy-2.3.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:50a5fe69f135f88a2be9b6ca0481a68a136f6febe1916e4920e12f1a34e708a7", size = 5114624, upload-time = "2025-09-09T15:57:56.5Z" }, + { url = "https://files.pythonhosted.org/packages/fb/11/0ed919c8381ac9d2ffacd63fd1f0c34d27e99cab650f0eb6f110e6ae4858/numpy-2.3.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:b912f2ed2b67a129e6a601e9d93d4fa37bef67e54cac442a2f588a54afe5c67a", size = 6642627, upload-time = "2025-09-09T15:57:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/ee/83/deb5f77cb0f7ba6cb52b91ed388b47f8f3c2e9930d4665c600408d9b90b9/numpy-2.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9e318ee0596d76d4cb3d78535dc005fa60e5ea348cd131a51e99d0bdbe0b54fe", size = 14296926, upload-time = "2025-09-09T15:58:00.035Z" }, + { url = "https://files.pythonhosted.org/packages/77/cc/70e59dcb84f2b005d4f306310ff0a892518cc0c8000a33d0e6faf7ca8d80/numpy-2.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce020080e4a52426202bdb6f7691c65bb55e49f261f31a8f506c9f6bc7450421", size = 16638958, upload-time = "2025-09-09T15:58:02.738Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5a/b2ab6c18b4257e099587d5b7f903317bd7115333ad8d4ec4874278eafa61/numpy-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e6687dc183aa55dae4a705b35f9c0f8cb178bcaa2f029b241ac5356221d5c021", size = 16071920, upload-time = "2025-09-09T15:58:05.029Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f1/8b3fdc44324a259298520dd82147ff648979bed085feeacc1250ef1656c0/numpy-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d8f3b1080782469fdc1718c4ed1d22549b5fb12af0d57d35e992158a772a37cf", size = 18577076, upload-time = "2025-09-09T15:58:07.745Z" }, + { url = "https://files.pythonhosted.org/packages/f0/a1/b87a284fb15a42e9274e7fcea0dad259d12ddbf07c1595b26883151ca3b4/numpy-2.3.3-cp314-cp314-win32.whl", hash = "sha256:cb248499b0bc3be66ebd6578b83e5acacf1d6cb2a77f2248ce0e40fbec5a76d0", size = 6366952, upload-time = "2025-09-09T15:58:10.096Z" }, + { url = "https://files.pythonhosted.org/packages/70/5f/1816f4d08f3b8f66576d8433a66f8fa35a5acfb3bbd0bf6c31183b003f3d/numpy-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:691808c2b26b0f002a032c73255d0bd89751425f379f7bcd22d140db593a96e8", size = 12919322, upload-time = "2025-09-09T15:58:12.138Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/de/072420342e46a8ea41c324a555fa90fcc11637583fb8df722936aed1736d/numpy-2.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:9ad12e976ca7b10f1774b03615a2a4bab8addce37ecc77394d8e986927dc0dfe", size = 10478630, upload-time = "2025-09-09T15:58:14.64Z" }, + { url = "https://files.pythonhosted.org/packages/d5/df/ee2f1c0a9de7347f14da5dd3cd3c3b034d1b8607ccb6883d7dd5c035d631/numpy-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9cc48e09feb11e1db00b320e9d30a4151f7369afb96bd0e48d942d09da3a0d00", size = 21047987, upload-time = "2025-09-09T15:58:16.889Z" }, + { url = "https://files.pythonhosted.org/packages/d6/92/9453bdc5a4e9e69cf4358463f25e8260e2ffc126d52e10038b9077815989/numpy-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:901bf6123879b7f251d3631967fd574690734236075082078e0571977c6a8e6a", size = 14301076, upload-time = "2025-09-09T15:58:20.343Z" }, + { url = "https://files.pythonhosted.org/packages/13/77/1447b9eb500f028bb44253105bd67534af60499588a5149a94f18f2ca917/numpy-2.3.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:7f025652034199c301049296b59fa7d52c7e625017cae4c75d8662e377bf487d", size = 5229491, upload-time = "2025-09-09T15:58:22.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/f9/d72221b6ca205f9736cb4b2ce3b002f6e45cd67cd6a6d1c8af11a2f0b649/numpy-2.3.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:533ca5f6d325c80b6007d4d7fb1984c303553534191024ec6a524a4c92a5935a", size = 6737913, upload-time = "2025-09-09T15:58:24.569Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/d12834711962ad9c46af72f79bb31e73e416ee49d17f4c797f72c96b6ca5/numpy-2.3.3-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0edd58682a399824633b66885d699d7de982800053acf20be1eaa46d92009c54", size = 14352811, upload-time = "2025-09-09T15:58:26.416Z" }, + { url = "https://files.pythonhosted.org/packages/a1/0d/fdbec6629d97fd1bebed56cd742884e4eead593611bbe1abc3eb40d304b2/numpy-2.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:367ad5d8fbec5d9296d18478804a530f1191e24ab4d75ab408346ae88045d25e", size = 16702689, upload-time = "2025-09-09T15:58:28.831Z" }, + { url = "https://files.pythonhosted.org/packages/9b/09/0a35196dc5575adde1eb97ddfbc3e1687a814f905377621d18ca9bc2b7dd/numpy-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8f6ac61a217437946a1fa48d24c47c91a0c4f725237871117dea264982128097", size = 16133855, upload-time = "2025-09-09T15:58:31.349Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ca/c9de3ea397d576f1b6753eaa906d4cdef1bf97589a6d9825a349b4729cc2/numpy-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:179a42101b845a816d464b6fe9a845dfaf308fdfc7925387195570789bb2c970", size = 18652520, upload-time = "2025-09-09T15:58:33.762Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c2/e5ed830e08cd0196351db55db82f65bc0ab05da6ef2b72a836dcf1936d2f/numpy-2.3.3-cp314-cp314t-win32.whl", hash = "sha256:1250c5d3d2562ec4174bce2e3a1523041595f9b651065e4a4473f5f48a6bc8a5", size = 6515371, upload-time = "2025-09-09T15:58:36.04Z" }, + { url = "https://files.pythonhosted.org/packages/47/c7/b0f6b5b67f6788a0725f744496badbb604d226bf233ba716683ebb47b570/numpy-2.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:b37a0b2e5935409daebe82c1e42274d30d9dd355852529eab91dab8dcca7419f", size = 13112576, upload-time = "2025-09-09T15:58:37.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/b9/33bba5ff6fb679aa0b1f8a07e853f002a6b04b9394db3069a1270a7784ca/numpy-2.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:78c9f6560dc7e6b3990e32df7ea1a50bbd0e2a111e05209963f5ddcab7073b0b", size = 10545953, upload-time = "2025-09-09T15:58:40.576Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f2/7e0a37cfced2644c9563c529f29fa28acbd0960dde32ece683aafa6f4949/numpy-2.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1e02c7159791cd481e1e6d5ddd766b62a4d5acf8df4d4d1afe35ee9c5c33a41e", size = 21131019, upload-time = "2025-09-09T15:58:42.838Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/3291f505297ed63831135a6cc0f474da0c868a1f31b0dd9a9f03a7a0d2ed/numpy-2.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:dca2d0fc80b3893ae72197b39f69d55a3cd8b17ea1b50aa4c62de82419936150", size = 14376288, upload-time = "2025-09-09T15:58:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4b/ae02e985bdeee73d7b5abdefeb98aef1207e96d4c0621ee0cf228ddfac3c/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:99683cbe0658f8271b333a1b1b4bb3173750ad59c0c61f5bbdc5b318918fffe3", size = 5305425, upload-time = "2025-09-09T15:58:48.6Z" }, + { url = "https://files.pythonhosted.org/packages/8b/eb/9df215d6d7250db32007941500dc51c48190be25f2401d5b2b564e467247/numpy-2.3.3-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d9d537a39cc9de668e5cd0e25affb17aec17b577c6b3ae8a3d866b479fbe88d0", size = 6819053, upload-time = "2025-09-09T15:58:50.401Z" }, + { url = "https://files.pythonhosted.org/packages/57/62/208293d7d6b2a8998a4a1f23ac758648c3c32182d4ce4346062018362e29/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8596ba2f8af5f93b01d97563832686d20206d303024777f6dfc2e7c7c3f1850e", size = 14420354, upload-time = "2025-09-09T15:58:52.704Z" }, + { url = "https://files.pythonhosted.org/packages/ed/0c/8e86e0ff7072e14a71b4c6af63175e40d1e7e933ce9b9e9f765a95b4e0c3/numpy-2.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1ec5615b05369925bd1125f27df33f3b6c8bc10d788d5999ecd8769a1fa04db", size = 16760413, upload-time = "2025-09-09T15:58:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/af/11/0cc63f9f321ccf63886ac203336777140011fb669e739da36d8db3c53b98/numpy-2.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2e267c7da5bf7309670523896df97f93f6e469fb931161f483cd6882b3b1a5dc", size = 12971844, upload-time = "2025-09-09T15:58:57.359Z" }, ] [[package]] name = "openai" -version = "1.82.0" +version = "1.109.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -833,57 +1620,57 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3f/19/6b09bb3132f7e1a7a2291fd46fb33659bbccca041f863abd682e14ba86d7/openai-1.82.0.tar.gz", hash = "sha256:b0a009b9a58662d598d07e91e4219ab4b1e3d8ba2db3f173896a92b9b874d1a7", size = 461092, upload-time = "2025-05-22T20:08:07.282Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/4b/a59464ee5f77822a81ee069b4021163a0174940a92685efc3cf8b4c443a3/openai-1.82.0-py3-none-any.whl", hash = 
"sha256:8c40647fea1816516cb3de5189775b30b5f4812777e40b8768f361f232b61b30", size = 720412, upload-time = "2025-05-22T20:08:05.637Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" }, ] [[package]] name = "opentelemetry-api" -version = "1.33.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "importlib-metadata" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/8d/1f5a45fbcb9a7d87809d460f09dc3399e3fbd31d7f3e14888345e9d29951/opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8", size = 65002, upload-time = "2025-05-16T18:52:41.146Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/04/05040d7ce33a907a2a02257e601992f0cdf11c73b33f13c4492bf6c3d6d5/opentelemetry_api-1.37.0.tar.gz", hash = "sha256:540735b120355bd5112738ea53621f8d5edb35ebcd6fe21ada3ab1c61d1cd9a7", size = 64923, upload-time = "2025-09-11T10:29:01.662Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/44/4c45a34def3506122ae61ad684139f0bbc4e00c39555d4f7e20e0e001c8a/opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83", size = 65771, upload-time = "2025-05-16T18:52:17.419Z" }, + { url = "https://files.pythonhosted.org/packages/91/48/28ed9e55dcf2f453128df738210a980e09f4e468a456fa3c763dbc8be70a/opentelemetry_api-1.37.0-py3-none-any.whl", hash = "sha256:accf2024d3e89faec14302213bc39550ec0f4095d1cf5ca688e1bfb1c8612f47", size = 65732, upload-time = "2025-09-11T10:28:41.826Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.33.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/18/a1ec9dcb6713a48b4bdd10f1c1e4d5d2489d3912b80d2bcc059a9a842836/opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131", size = 20828, upload-time = "2025-05-16T18:52:43.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/6c/10018cbcc1e6fff23aac67d7fd977c3d692dbe5f9ef9bb4db5c1268726cc/opentelemetry_exporter_otlp_proto_common-1.37.0.tar.gz", hash = "sha256:c87a1bdd9f41fdc408d9cc9367bb53f8d2602829659f2b90be9f9d79d0bfe62c", size = 20430, upload-time = "2025-09-11T10:29:03.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/52/9bcb17e2c29c1194a28e521b9d3f2ced09028934c3c52a8205884c94b2df/opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36", size = 18839, upload-time = "2025-05-16T18:52:22.447Z" }, + { url = "https://files.pythonhosted.org/packages/08/13/b4ef09837409a777f3c0af2a5b4ba9b7af34872bc43609dda0c209e4060d/opentelemetry_exporter_otlp_proto_common-1.37.0-py3-none-any.whl", hash = "sha256:53038428449c559b0c564b8d718df3314da387109c4d36bd1b94c9a641b0292e", size = 18359, upload-time = "2025-09-11T10:28:44.939Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.33.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } 
dependencies = [ - { name = "deprecated" }, { name = "googleapis-common-protos" }, { name = "opentelemetry-api" }, { name = "opentelemetry-exporter-otlp-proto-common" }, { name = "opentelemetry-proto" }, { name = "opentelemetry-sdk" }, { name = "requests" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/48/e4314ac0ed2ad043c07693d08c9c4bf5633857f5b72f2fefc64fd2b114f6/opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38", size = 15353, upload-time = "2025-05-16T18:52:45.522Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5d/e3/6e320aeb24f951449e73867e53c55542bebbaf24faeee7623ef677d66736/opentelemetry_exporter_otlp_proto_http-1.37.0.tar.gz", hash = "sha256:e52e8600f1720d6de298419a802108a8f5afa63c96809ff83becb03f874e44ac", size = 17281, upload-time = "2025-09-11T10:29:04.844Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/63/ba/5a4ad007588016fe37f8d36bf08f325fe684494cc1e88ca8fa064a4c8f57/opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf", size = 17733, upload-time = "2025-05-16T18:52:25.137Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/70d74a664d83976556cec395d6bfedd9b85ec1498b778367d5f93e373397/opentelemetry_exporter_otlp_proto_http-1.37.0-py3-none-any.whl", hash = "sha256:54c42b39945a6cc9d9a2a33decb876eabb9547e0dcb49df090122773447f1aef", size = 19576, upload-time = "2025-09-11T10:28:46.726Z" }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.54b1" +version = "0.58b0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -891,48 +1678,137 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/fd/5756aea3fdc5651b572d8aef7d94d22a0a36e49c8b12fcb78cb905ba8896/opentelemetry_instrumentation-0.54b1.tar.gz", hash = "sha256:7658bf2ff914b02f246ec14779b66671508125c0e4227361e56b5ebf6cef0aec", size = 28436, upload-time = "2025-05-16T19:03:22.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/36/7c307d9be8ce4ee7beb86d7f1d31027f2a6a89228240405a858d6e4d64f9/opentelemetry_instrumentation-0.58b0.tar.gz", hash = "sha256:df640f3ac715a3e05af145c18f527f4422c6ab6c467e40bd24d2ad75a00cb705", size = 31549, upload-time = "2025-09-11T11:42:14.084Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/89/0790abc5d9c4fc74bd3e03cb87afe2c820b1d1a112a723c1163ef32453ee/opentelemetry_instrumentation-0.54b1-py3-none-any.whl", hash = "sha256:a4ae45f4a90c78d7006c51524f57cd5aa1231aef031eae905ee34d5423f5b198", size = 31019, upload-time = "2025-05-16T19:02:15.611Z" }, + { url = "https://files.pythonhosted.org/packages/d4/db/5ff1cd6c5ca1d12ecf1b73be16fbb2a8af2114ee46d4b0e6d4b23f4f4db7/opentelemetry_instrumentation-0.58b0-py3-none-any.whl", hash = "sha256:50f97ac03100676c9f7fc28197f8240c7290ca1baa12da8bfbb9a1de4f34cc45", size = 33019, upload-time = "2025-09-11T11:41:00.624Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "wrapt" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/07/21/ba3a0106795337716e5e324f58fd3c04f5967e330c0408d0d68d873454db/opentelemetry_instrumentation_httpx-0.58b0.tar.gz", hash = "sha256:3cd747e7785a06d06bd58875e8eb11595337c98c4341f4fe176ff1f734a90db7", size = 19887, upload-time = "2025-09-11T11:42:37.926Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/e7/6dc8ee4881889993fa4a7d3da225e5eded239c975b9831eff392abd5a5e4/opentelemetry_instrumentation_httpx-0.58b0-py3-none-any.whl", hash = "sha256:d3f5a36c7fed08c245f1b06d1efd91f624caf2bff679766df80981486daaccdb", size = 15197, upload-time = "2025-09-11T11:41:32.66Z" }, ] [[package]] name = "opentelemetry-proto" -version = "1.33.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f6/dc/791f3d60a1ad8235930de23eea735ae1084be1c6f96fdadf38710662a7e5/opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68", size = 34363, upload-time = "2025-05-16T18:52:52.141Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/ea/a75f36b463a36f3c5a10c0b5292c58b31dbdde74f6f905d3d0ab2313987b/opentelemetry_proto-1.37.0.tar.gz", hash = "sha256:30f5c494faf66f77faeaefa35ed4443c5edb3b0aa46dad073ed7210e1a789538", size = 46151, upload-time = "2025-09-11T10:29:11.04Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/29/48609f4c875c2b6c80930073c82dd1cafd36b6782244c01394007b528960/opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70", size = 55854, upload-time = "2025-05-16T18:52:36.269Z" }, + { url = "https://files.pythonhosted.org/packages/c4/25/f89ea66c59bd7687e218361826c969443c4fa15dfe89733f3bf1e2a9e971/opentelemetry_proto-1.37.0-py3-none-any.whl", hash = "sha256:8ed8c066ae8828bbf0c39229979bdf583a126981142378a9cbe9d6fd5701c6e2", size = 72534, upload-time = "2025-09-11T10:28:56.831Z" }, ] [[package]] name = "opentelemetry-sdk" -version = "1.33.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/12/909b98a7d9b110cce4b28d49b2e311797cffdce180371f35eba13a72dd00/opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531", size = 161885, upload-time = "2025-05-16T18:52:52.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/62/2e0ca80d7fe94f0b193135375da92c640d15fe81f636658d2acf373086bc/opentelemetry_sdk-1.37.0.tar.gz", hash = "sha256:cc8e089c10953ded765b5ab5669b198bbe0af1b3f89f1007d19acd32dc46dda5", size = 170404, upload-time = "2025-09-11T10:29:11.779Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/8e/ae2d0742041e0bd7fe0d2dcc5e7cce51dcf7d3961a26072d5b43cc8fa2a7/opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112", size = 118950, upload-time = "2025-05-16T18:52:37.297Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/9f4ad6a54126fb00f7ed4bb5034964c6e4f00fcd5a905e115bd22707e20d/opentelemetry_sdk-1.37.0-py3-none-any.whl", hash = "sha256:8f3c3c22063e52475c5dbced7209495c2c16723d016d39287dfc215d1771257c", size = 131941, upload-time = "2025-09-11T10:28:57.83Z" }, ] [[package]] name = 
"opentelemetry-semantic-conventions" -version = "0.54b1" +version = "0.58b0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated" }, { name = "opentelemetry-api" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/2c/d7990fc1ffc82889d466e7cd680788ace44a26789809924813b164344393/opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee", size = 118642, upload-time = "2025-05-16T18:52:53.962Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/80/08b1698c52ff76d96ba440bf15edc2f4bc0a279868778928e947c1004bdd/opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d", size = 194938, upload-time = "2025-05-16T18:52:38.796Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/aa/1b/90701d91e6300d9f2fb352153fb1721ed99ed1f6ea14fa992c756016e63a/opentelemetry_semantic_conventions-0.58b0.tar.gz", hash = "sha256:6bd46f51264279c433755767bb44ad00f1c9e2367e1b42af563372c5a6fa0c25", size = 129867, upload-time = "2025-09-11T10:29:12.597Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/90/68152b7465f50285d3ce2481b3aec2f82822e3f52e5152eeeaf516bab841/opentelemetry_semantic_conventions-0.58b0-py3-none-any.whl", hash = "sha256:5564905ab1458b96684db1340232729fce3b5375a06e140e8904c78e4f815b28", size = 207954, upload-time = "2025-09-11T10:28:59.218Z" }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.58b0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5f/02f31530faf50ef8a41ab34901c05cbbf8e9d76963ba2fb852b0b4065f4e/opentelemetry_util_http-0.58b0.tar.gz", hash = "sha256:de0154896c3472c6599311c83e0ecee856c4da1b17808d39fdc5cce5312e4d89", size = 9411, upload-time = "2025-09-11T11:43:05.602Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/a3/0a1430c42c6d34d8372a16c104e7408028f0c30270d8f3eb6cccf2e82934/opentelemetry_util_http-0.58b0-py3-none-any.whl", hash = "sha256:6c6b86762ed43025fbd593dc5f700ba0aa3e09711aedc36fd48a13b23d8cb1e7", size = 7652, upload-time = "2025-09-11T11:42:09.682Z" }, +] + +[[package]] +name = "orjson" +version = "3.11.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/4d/8df5f83256a809c22c4d6792ce8d43bb503be0fb7a8e4da9025754b09658/orjson-3.11.3.tar.gz", hash = "sha256:1c0603b1d2ffcd43a411d64797a19556ef76958aef1c182f22dc30860152a98a", size = 5482394, upload-time = "2025-08-26T17:46:43.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/8b/360674cd817faef32e49276187922a946468579fcaf37afdfb6c07046e92/orjson-3.11.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d2ae0cc6aeb669633e0124531f342a17d8e97ea999e42f12a5ad4adaa304c5f", size = 238238, upload-time = "2025-08-26T17:44:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/05/3d/5fa9ea4b34c1a13be7d9046ba98d06e6feb1d8853718992954ab59d16625/orjson-3.11.3-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:ba21dbb2493e9c653eaffdc38819b004b7b1b246fb77bfc93dc016fe664eac91", size = 127713, upload-time = "2025-08-26T17:44:55.596Z" }, + { url = "https://files.pythonhosted.org/packages/e5/5f/e18367823925e00b1feec867ff5f040055892fc474bf5f7875649ecfa586/orjson-3.11.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:00f1a271e56d511d1569937c0447d7dce5a99a33ea0dec76673706360a051904", size = 123241, upload-time = "2025-08-26T17:44:57.185Z" }, + { url = "https://files.pythonhosted.org/packages/0f/bd/3c66b91c4564759cf9f473251ac1650e446c7ba92a7c0f9f56ed54f9f0e6/orjson-3.11.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b67e71e47caa6680d1b6f075a396d04fa6ca8ca09aafb428731da9b3ea32a5a6", size = 127895, upload-time = "2025-08-26T17:44:58.349Z" }, + { url = "https://files.pythonhosted.org/packages/82/b5/dc8dcd609db4766e2967a85f63296c59d4722b39503e5b0bf7fd340d387f/orjson-3.11.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7d012ebddffcce8c85734a6d9e5f08180cd3857c5f5a3ac70185b43775d043d", size = 130303, upload-time = "2025-08-26T17:44:59.491Z" }, + { url = "https://files.pythonhosted.org/packages/48/c2/d58ec5fd1270b2aa44c862171891adc2e1241bd7dab26c8f46eb97c6c6f1/orjson-3.11.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd759f75d6b8d1b62012b7f5ef9461d03c804f94d539a5515b454ba3a6588038", size = 132366, upload-time = "2025-08-26T17:45:00.654Z" }, + { url = "https://files.pythonhosted.org/packages/73/87/0ef7e22eb8dd1ef940bfe3b9e441db519e692d62ed1aae365406a16d23d0/orjson-3.11.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6890ace0809627b0dff19cfad92d69d0fa3f089d3e359a2a532507bb6ba34efb", size = 135180, upload-time = "2025-08-26T17:45:02.424Z" }, + { url = "https://files.pythonhosted.org/packages/bb/6a/e5bf7b70883f374710ad74faf99bacfc4b5b5a7797c1d5e130350e0e28a3/orjson-3.11.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d4a5e041ae435b815e568537755773d05dac031fee6a57b4ba70897a44d9d2", size = 132741, upload-time = "2025-08-26T17:45:03.663Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0c/4577fd860b6386ffaa56440e792af01c7882b56d2766f55384b5b0e9d39b/orjson-3.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d68bf97a771836687107abfca089743885fb664b90138d8761cce61d5625d55", size = 131104, upload-time = "2025-08-26T17:45:04.939Z" }, + { url = "https://files.pythonhosted.org/packages/66/4b/83e92b2d67e86d1c33f2ea9411742a714a26de63641b082bdbf3d8e481af/orjson-3.11.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfc27516ec46f4520b18ef645864cee168d2a027dbf32c5537cb1f3e3c22dac1", size = 403887, upload-time = "2025-08-26T17:45:06.228Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e5/9eea6a14e9b5ceb4a271a1fd2e1dec5f2f686755c0fab6673dc6ff3433f4/orjson-3.11.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f66b001332a017d7945e177e282a40b6997056394e3ed7ddb41fb1813b83e824", size = 145855, upload-time = "2025-08-26T17:45:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/45/78/8d4f5ad0c80ba9bf8ac4d0fc71f93a7d0dc0844989e645e2074af376c307/orjson-3.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:212e67806525d2561efbfe9e799633b17eb668b8964abed6b5319b2f1cfbae1f", size = 135361, upload-time = "2025-08-26T17:45:09.625Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5f/16386970370178d7a9b438517ea3d704efcf163d286422bae3b37b88dbb5/orjson-3.11.3-cp311-cp311-win32.whl", hash = "sha256:6e8e0c3b85575a32f2ffa59de455f85ce002b8bdc0662d6b9c2ed6d80ab5d204", size = 136190, upload-time = "2025-08-26T17:45:10.962Z" }, + { url = "https://files.pythonhosted.org/packages/09/60/db16c6f7a41dd8ac9fb651f66701ff2aeb499ad9ebc15853a26c7c152448/orjson-3.11.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:6be2f1b5d3dc99a5ce5ce162fc741c22ba9f3443d3dd586e6a1211b7bc87bc7b", size = 131389, upload-time = "2025-08-26T17:45:12.285Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2a/bb811ad336667041dea9b8565c7c9faf2f59b47eb5ab680315eea612ef2e/orjson-3.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:fafb1a99d740523d964b15c8db4eabbfc86ff29f84898262bf6e3e4c9e97e43e", size = 126120, upload-time = "2025-08-26T17:45:13.515Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b0/a7edab2a00cdcb2688e1c943401cb3236323e7bfd2839815c6131a3742f4/orjson-3.11.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8c752089db84333e36d754c4baf19c0e1437012242048439c7e80eb0e6426e3b", size = 238259, upload-time = "2025-08-26T17:45:15.093Z" }, + { url = "https://files.pythonhosted.org/packages/e1/c6/ff4865a9cc398a07a83342713b5932e4dc3cb4bf4bc04e8f83dedfc0d736/orjson-3.11.3-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:9b8761b6cf04a856eb544acdd82fc594b978f12ac3602d6374a7edb9d86fd2c2", size = 127633, upload-time = "2025-08-26T17:45:16.417Z" }, + { url = "https://files.pythonhosted.org/packages/6e/e6/e00bea2d9472f44fe8794f523e548ce0ad51eb9693cf538a753a27b8bda4/orjson-3.11.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b13974dc8ac6ba22feaa867fc19135a3e01a134b4f7c9c28162fed4d615008a", size = 123061, upload-time = "2025-08-26T17:45:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/54/31/9fbb78b8e1eb3ac605467cb846e1c08d0588506028b37f4ee21f978a51d4/orjson-3.11.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f83abab5bacb76d9c821fd5c07728ff224ed0e52d7a71b7b3de822f3df04e15c", size = 127956, upload-time = "2025-08-26T17:45:19.172Z" }, + { url = "https://files.pythonhosted.org/packages/36/88/b0604c22af1eed9f98d709a96302006915cfd724a7ebd27d6dd11c22d80b/orjson-3.11.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6fbaf48a744b94091a56c62897b27c31ee2da93d826aa5b207131a1e13d4064", size = 130790, upload-time = "2025-08-26T17:45:20.586Z" }, + { url = "https://files.pythonhosted.org/packages/0e/9d/1c1238ae9fffbfed51ba1e507731b3faaf6b846126a47e9649222b0fd06f/orjson-3.11.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc779b4f4bba2847d0d2940081a7b6f7b5877e05408ffbb74fa1faf4a136c424", size = 132385, upload-time = "2025-08-26T17:45:22.036Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b5/c06f1b090a1c875f337e21dd71943bc9d84087f7cdf8c6e9086902c34e42/orjson-3.11.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd4b909ce4c50faa2192da6bb684d9848d4510b736b0611b6ab4020ea6fd2d23", size = 135305, upload-time = "2025-08-26T17:45:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/a0/26/5f028c7d81ad2ebbf84414ba6d6c9cac03f22f5cd0d01eb40fb2d6a06b07/orjson-3.11.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524b765ad888dc5518bbce12c77c2e83dee1ed6b0992c1790cc5fb49bb4b6667", size = 132875, upload-time = "2025-08-26T17:45:25.182Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d4/b8df70d9cfb56e385bf39b4e915298f9ae6c61454c8154a0f5fd7efcd42e/orjson-3.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:84fd82870b97ae3cdcea9d8746e592b6d40e1e4d4527835fc520c588d2ded04f", size = 130940, upload-time = "2025-08-26T17:45:27.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/5e/afe6a052ebc1a4741c792dd96e9f65bf3939d2094e8b356503b68d48f9f5/orjson-3.11.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fbecb9709111be913ae6879b07bafd4b0785b44c1eb5cac8ac76da048b3885a1", size = 403852, upload-time = "2025-08-26T17:45:28.478Z" }, + { url = "https://files.pythonhosted.org/packages/f8/90/7bbabafeb2ce65915e9247f14a56b29c9334003536009ef5b122783fe67e/orjson-3.11.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9dba358d55aee552bd868de348f4736ca5a4086d9a62e2bfbbeeb5629fe8b0cc", size = 146293, upload-time = "2025-08-26T17:45:29.86Z" }, + { url = "https://files.pythonhosted.org/packages/27/b3/2d703946447da8b093350570644a663df69448c9d9330e5f1d9cce997f20/orjson-3.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eabcf2e84f1d7105f84580e03012270c7e97ecb1fb1618bda395061b2a84a049", size = 135470, upload-time = "2025-08-26T17:45:31.243Z" }, + { url = "https://files.pythonhosted.org/packages/38/70/b14dcfae7aff0e379b0119c8a812f8396678919c431efccc8e8a0263e4d9/orjson-3.11.3-cp312-cp312-win32.whl", hash = "sha256:3782d2c60b8116772aea8d9b7905221437fdf53e7277282e8d8b07c220f96cca", size = 136248, upload-time = "2025-08-26T17:45:32.567Z" }, + { url = "https://files.pythonhosted.org/packages/35/b8/9e3127d65de7fff243f7f3e53f59a531bf6bb295ebe5db024c2503cc0726/orjson-3.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:79b44319268af2eaa3e315b92298de9a0067ade6e6003ddaef72f8e0bedb94f1", size = 131437, upload-time = "2025-08-26T17:45:34.949Z" }, + { url = "https://files.pythonhosted.org/packages/51/92/a946e737d4d8a7fd84a606aba96220043dcc7d6988b9e7551f7f6d5ba5ad/orjson-3.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:0e92a4e83341ef79d835ca21b8bd13e27c859e4e9e4d7b63defc6e58462a3710", size = 125978, upload-time = "2025-08-26T17:45:36.422Z" }, + { url = "https://files.pythonhosted.org/packages/fc/79/8932b27293ad35919571f77cb3693b5906cf14f206ef17546052a241fdf6/orjson-3.11.3-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:af40c6612fd2a4b00de648aa26d18186cd1322330bd3a3cc52f87c699e995810", size = 238127, upload-time = "2025-08-26T17:45:38.146Z" }, + { url = "https://files.pythonhosted.org/packages/1c/82/cb93cd8cf132cd7643b30b6c5a56a26c4e780c7a145db6f83de977b540ce/orjson-3.11.3-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:9f1587f26c235894c09e8b5b7636a38091a9e6e7fe4531937534749c04face43", size = 127494, upload-time = "2025-08-26T17:45:39.57Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/2d9eb181a9b6bb71463a78882bcac1027fd29cf62c38a40cc02fc11d3495/orjson-3.11.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61dcdad16da5bb486d7227a37a2e789c429397793a6955227cedbd7252eb5a27", size = 123017, upload-time = "2025-08-26T17:45:40.876Z" }, + { url = "https://files.pythonhosted.org/packages/b4/14/a0e971e72d03b509190232356d54c0f34507a05050bd026b8db2bf2c192c/orjson-3.11.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11c6d71478e2cbea0a709e8a06365fa63da81da6498a53e4c4f065881d21ae8f", size = 127898, upload-time = "2025-08-26T17:45:42.188Z" }, + { url = "https://files.pythonhosted.org/packages/8e/af/dc74536722b03d65e17042cc30ae586161093e5b1f29bccda24765a6ae47/orjson-3.11.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff94112e0098470b665cb0ed06efb187154b63649403b8d5e9aedeb482b4548c", size = 130742, upload-time = "2025-08-26T17:45:43.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/e6/7a3b63b6677bce089fe939353cda24a7679825c43a24e49f757805fc0d8a/orjson-3.11.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b756575aaa2a855a75192f356bbda11a89169830e1439cfb1a3e1a6dde7be", size = 132377, upload-time = "2025-08-26T17:45:45.525Z" }, + { url = "https://files.pythonhosted.org/packages/fc/cd/ce2ab93e2e7eaf518f0fd15e3068b8c43216c8a44ed82ac2b79ce5cef72d/orjson-3.11.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c9416cc19a349c167ef76135b2fe40d03cea93680428efee8771f3e9fb66079d", size = 135313, upload-time = "2025-08-26T17:45:46.821Z" }, + { url = "https://files.pythonhosted.org/packages/d0/b4/f98355eff0bd1a38454209bbc73372ce351ba29933cb3e2eba16c04b9448/orjson-3.11.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b822caf5b9752bc6f246eb08124c3d12bf2175b66ab74bac2ef3bbf9221ce1b2", size = 132908, upload-time = "2025-08-26T17:45:48.126Z" }, + { url = "https://files.pythonhosted.org/packages/eb/92/8f5182d7bc2a1bed46ed960b61a39af8389f0ad476120cd99e67182bfb6d/orjson-3.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:414f71e3bdd5573893bf5ecdf35c32b213ed20aa15536fe2f588f946c318824f", size = 130905, upload-time = "2025-08-26T17:45:49.414Z" }, + { url = "https://files.pythonhosted.org/packages/1a/60/c41ca753ce9ffe3d0f67b9b4c093bdd6e5fdb1bc53064f992f66bb99954d/orjson-3.11.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:828e3149ad8815dc14468f36ab2a4b819237c155ee1370341b91ea4c8672d2ee", size = 403812, upload-time = "2025-08-26T17:45:51.085Z" }, + { url = "https://files.pythonhosted.org/packages/dd/13/e4a4f16d71ce1868860db59092e78782c67082a8f1dc06a3788aef2b41bc/orjson-3.11.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac9e05f25627ffc714c21f8dfe3a579445a5c392a9c8ae7ba1d0e9fb5333f56e", size = 146277, upload-time = "2025-08-26T17:45:52.851Z" }, + { url = "https://files.pythonhosted.org/packages/8d/8b/bafb7f0afef9344754a3a0597a12442f1b85a048b82108ef2c956f53babd/orjson-3.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e44fbe4000bd321d9f3b648ae46e0196d21577cf66ae684a96ff90b1f7c93633", size = 135418, upload-time = "2025-08-26T17:45:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/60/d4/bae8e4f26afb2c23bea69d2f6d566132584d1c3a5fe89ee8c17b718cab67/orjson-3.11.3-cp313-cp313-win32.whl", hash = "sha256:2039b7847ba3eec1f5886e75e6763a16e18c68a63efc4b029ddf994821e2e66b", size = 136216, upload-time = "2025-08-26T17:45:57.182Z" }, + { url = "https://files.pythonhosted.org/packages/88/76/224985d9f127e121c8cad882cea55f0ebe39f97925de040b75ccd4b33999/orjson-3.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:29be5ac4164aa8bdcba5fa0700a3c9c316b411d8ed9d39ef8a882541bd452fae", size = 131362, upload-time = "2025-08-26T17:45:58.56Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cf/0dce7a0be94bd36d1346be5067ed65ded6adb795fdbe3abd234c8d576d01/orjson-3.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:18bd1435cb1f2857ceb59cfb7de6f92593ef7b831ccd1b9bfb28ca530e539dce", size = 125989, upload-time = "2025-08-26T17:45:59.95Z" }, + { url = "https://files.pythonhosted.org/packages/ef/77/d3b1fef1fc6aaeed4cbf3be2b480114035f4df8fa1a99d2dac1d40d6e924/orjson-3.11.3-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cf4b81227ec86935568c7edd78352a92e97af8da7bd70bdfdaa0d2e0011a1ab4", size = 238115, upload-time = "2025-08-26T17:46:01.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/6d/468d21d49bb12f900052edcfbf52c292022d0a323d7828dc6376e6319703/orjson-3.11.3-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:bc8bc85b81b6ac9fc4dae393a8c159b817f4c2c9dee5d12b773bddb3b95fc07e", size = 127493, upload-time = "2025-08-26T17:46:03.466Z" }, + { url = "https://files.pythonhosted.org/packages/67/46/1e2588700d354aacdf9e12cc2d98131fb8ac6f31ca65997bef3863edb8ff/orjson-3.11.3-cp314-cp314-manylinux_2_34_aarch64.whl", hash = "sha256:88dcfc514cfd1b0de038443c7b3e6a9797ffb1b3674ef1fd14f701a13397f82d", size = 122998, upload-time = "2025-08-26T17:46:04.803Z" }, + { url = "https://files.pythonhosted.org/packages/3b/94/11137c9b6adb3779f1b34fd98be51608a14b430dbc02c6d41134fbba484c/orjson-3.11.3-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:d61cd543d69715d5fc0a690c7c6f8dcc307bc23abef9738957981885f5f38229", size = 132915, upload-time = "2025-08-26T17:46:06.237Z" }, + { url = "https://files.pythonhosted.org/packages/10/61/dccedcf9e9bcaac09fdabe9eaee0311ca92115699500efbd31950d878833/orjson-3.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2b7b153ed90ababadbef5c3eb39549f9476890d339cf47af563aea7e07db2451", size = 130907, upload-time = "2025-08-26T17:46:07.581Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/0e935539aa7b08b3ca0f817d73034f7eb506792aae5ecc3b7c6e679cdf5f/orjson-3.11.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7909ae2460f5f494fecbcd10613beafe40381fd0316e35d6acb5f3a05bfda167", size = 403852, upload-time = "2025-08-26T17:46:08.982Z" }, + { url = "https://files.pythonhosted.org/packages/4a/2b/50ae1a5505cd1043379132fdb2adb8a05f37b3e1ebffe94a5073321966fd/orjson-3.11.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:2030c01cbf77bc67bee7eef1e7e31ecf28649353987775e3583062c752da0077", size = 146309, upload-time = "2025-08-26T17:46:10.576Z" }, + { url = "https://files.pythonhosted.org/packages/cd/1d/a473c158e380ef6f32753b5f39a69028b25ec5be331c2049a2201bde2e19/orjson-3.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a0169ebd1cbd94b26c7a7ad282cf5c2744fce054133f959e02eb5265deae1872", size = 135424, upload-time = "2025-08-26T17:46:12.386Z" }, + { url = "https://files.pythonhosted.org/packages/da/09/17d9d2b60592890ff7382e591aa1d9afb202a266b180c3d4049b1ec70e4a/orjson-3.11.3-cp314-cp314-win32.whl", hash = "sha256:0c6d7328c200c349e3a4c6d8c83e0a5ad029bdc2d417f234152bf34842d0fc8d", size = 136266, upload-time = "2025-08-26T17:46:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/15/58/358f6846410a6b4958b74734727e582ed971e13d335d6c7ce3e47730493e/orjson-3.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:317bbe2c069bbc757b1a2e4105b64aacd3bc78279b66a6b9e51e846e4809f804", size = 131351, upload-time = "2025-08-26T17:46:15.27Z" }, + { url = "https://files.pythonhosted.org/packages/28/01/d6b274a0635be0468d4dbd9cafe80c47105937a0d42434e805e67cd2ed8b/orjson-3.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:e8f6a7a27d7b7bec81bd5924163e9af03d49bbb63013f107b48eb5d16db711bc", size = 125985, upload-time = "2025-08-26T17:46:16.67Z" }, ] [[package]] @@ -944,6 +1820,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "playwright" +version = "1.55.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet" }, + { name = "pyee" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/3a/c81ff76df266c62e24f19718df9c168f49af93cabdbc4608ae29656a9986/playwright-1.55.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:d7da108a95001e412effca4f7610de79da1637ccdf670b1ae3fdc08b9694c034", size = 40428109, upload-time = "2025-08-28T15:46:20.357Z" }, + { url = "https://files.pythonhosted.org/packages/cf/f5/bdb61553b20e907196a38d864602a9b4a461660c3a111c67a35179b636fa/playwright-1.55.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8290cf27a5d542e2682ac274da423941f879d07b001f6575a5a3a257b1d4ba1c", size = 38687254, upload-time = "2025-08-28T15:46:23.925Z" }, + { url = "https://files.pythonhosted.org/packages/4a/64/48b2837ef396487807e5ab53c76465747e34c7143fac4a084ef349c293a8/playwright-1.55.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:25b0d6b3fd991c315cca33c802cf617d52980108ab8431e3e1d37b5de755c10e", size = 40428108, upload-time = "2025-08-28T15:46:27.119Z" }, + { url = "https://files.pythonhosted.org/packages/08/33/858312628aa16a6de97839adc2ca28031ebc5391f96b6fb8fdf1fcb15d6c/playwright-1.55.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c6d4d8f6f8c66c483b0835569c7f0caa03230820af8e500c181c93509c92d831", size = 45905643, upload-time = "2025-08-28T15:46:30.312Z" }, + { url = "https://files.pythonhosted.org/packages/83/83/b8d06a5b5721931aa6d5916b83168e28bd891f38ff56fe92af7bdee9860f/playwright-1.55.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a0777c4ce1273acf90c87e4ae2fe0130182100d99bcd2ae5bf486093044838", size = 45296647, upload-time = "2025-08-28T15:46:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/06/2e/9db64518aebcb3d6ef6cd6d4d01da741aff912c3f0314dadb61226c6a96a/playwright-1.55.0-py3-none-win32.whl", hash = "sha256:29e6d1558ad9d5b5c19cbec0a72f6a2e35e6353cd9f262e22148685b86759f90", size = 35476046, upload-time = "2025-08-28T15:46:36.184Z" }, + { url = "https://files.pythonhosted.org/packages/46/4f/9ba607fa94bb9cee3d4beb1c7b32c16efbfc9d69d5037fa85d10cafc618b/playwright-1.55.0-py3-none-win_amd64.whl", hash = 
"sha256:7eb5956473ca1951abb51537e6a0da55257bb2e25fc37c2b75af094a5c93736c", size = 35476048, upload-time = "2025-08-28T15:46:38.867Z" }, + { url = "https://files.pythonhosted.org/packages/21/98/5ca173c8ec906abde26c28e1ecb34887343fd71cc4136261b90036841323/playwright-1.55.0-py3-none-win_arm64.whl", hash = "sha256:012dc89ccdcbd774cdde8aeee14c08e0dd52ddb9135bf10e9db040527386bd76", size = 31225543, upload-time = "2025-08-28T15:46:41.613Z" }, +] + [[package]] name = "pluggy" version = "1.6.0" @@ -955,28 +1868,101 @@ wheels = [ [[package]] name = "prompt-toolkit" -version = "3.0.51" +version = "3.0.52" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, 
upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, 
upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 
215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = 
"2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] [[package]] name = "protobuf" -version = "5.29.4" +version = "5.29.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/7d/b9dca7365f0e2c4fa7c193ff795427cfa6290147e5185ab11ece280a18e7/protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99", size = 424902, upload-time = "2025-03-19T21:23:24.25Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/b2/043a1a1a20edd134563699b0e91862726a0dc9146c090743b6c44d798e75/protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7", size = 422709, upload-time = "2025-03-19T21:23:08.293Z" }, - { url = "https://files.pythonhosted.org/packages/79/fc/2474b59570daa818de6124c0a15741ee3e5d6302e9d6ce0bdfd12e98119f/protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d", size = 434506, upload-time = "2025-03-19T21:23:11.253Z" }, - { url = "https://files.pythonhosted.org/packages/46/de/7c126bbb06aa0f8a7b38aaf8bd746c514d70e6a2a3f6dd460b3b7aad7aae/protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0", size = 417826, 
upload-time = "2025-03-19T21:23:13.132Z" }, - { url = "https://files.pythonhosted.org/packages/a2/b5/bade14ae31ba871a139aa45e7a8183d869efe87c34a4850c87b936963261/protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e", size = 319574, upload-time = "2025-03-19T21:23:14.531Z" }, - { url = "https://files.pythonhosted.org/packages/46/88/b01ed2291aae68b708f7d334288ad5fb3e7aa769a9c309c91a0d55cb91b0/protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922", size = 319672, upload-time = "2025-03-19T21:23:15.839Z" }, - { url = "https://files.pythonhosted.org/packages/12/fb/a586e0c973c95502e054ac5f81f88394f24ccc7982dac19c515acd9e2c93/protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862", size = 172551, upload-time = "2025-03-19T21:23:22.682Z" }, + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, ] [[package]] @@ -1002,7 +1988,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.5" +version = "2.11.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1010,30 +1996,29 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, ] [[package]] name = "pydantic-ai" -version = "0.2.9" +version = "1.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pydantic-ai-slim", extra = ["a2a", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "mcp", "mistral", "openai", "vertexai"] }, + { name = "pydantic-ai-slim", extra = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "vertexai"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/fb/c9f669244c239e4331bc6028b23e7d36e7f6f5164243b518dba86016c54f/pydantic_ai-0.2.9.tar.gz", hash = "sha256:cbe410c6ede774a82d99e81bc59ad386f6ffeddf6355ce2cfa42198067621075", size = 40500179, upload-time = "2025-05-26T07:48:34.734Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/cc/3b3cd81f35a7561c5b966a178c4cc551d27f4e8eab0fddcf26ad757f7b72/pydantic_ai-1.0.5.tar.gz", hash = "sha256:f5bf7d3c2bebecfe5b538fdc81fbf783815b36bb8a2e5f72e7633189d50e038d", size = 43969568, upload-time = "2025-09-12T01:24:13.504Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/a2/78f76415126ada87108a8b5b14ae4b2a792c6ef9a4538a8923208bbc1908/pydantic_ai-0.2.9-py3-none-any.whl", hash = "sha256:c267127f11146e98a044c350af01e912b28b394100212a6a947973d3f6b15e7f", size = 10123, upload-time = "2025-05-26T07:48:24.179Z" }, + { url = "https://files.pythonhosted.org/packages/31/30/ac51043eb56ffa21fb745210dbd9c463c5f2ce5fa21c349fcd8e271a998b/pydantic_ai-1.0.5-py3-none-any.whl", hash = "sha256:9087673ce885f1cdac2fd5cfa6fb431367b91bd4e496c5c0c1ede3c3186510d2", size = 11668, upload-time = "2025-09-12T01:24:01.564Z" }, ] [[package]] name = "pydantic-ai-slim" -version = "0.2.9" +version = "1.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "eval-type-backport" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "genai-prices" }, { name = "griffe" }, { name = "httpx" }, { name = "opentelemetry-api" }, @@ -1041,14 +2026,15 @@ dependencies = [ { name = "pydantic-graph" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/59/780411777eff7d5c46ac832111051d0c1d873ab63aacc0f705a762a25398/pydantic_ai_slim-0.2.9.tar.gz", hash = "sha256:0cf3ec26bedd2f723e7ddb9e14096a3b265e7f48dbd65cf686735bb0e8df39dd", size = 134776, upload-time = "2025-05-26T07:48:38.436Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/94/cd20ef89079e3f4c68c485be1ef07f3090801bbfbffa0aa389122e13cf7b/pydantic_ai_slim-1.0.5.tar.gz", hash = 
"sha256:5f8bf37e4f1744ee5aff91dbcbdc68f3a13142fb53d460195139b0e221e8563e", size = 241494, upload-time = "2025-09-12T01:24:18.088Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/23/b4d52d83c302859e1e251a8c8a360b993cf8b4818c8b633adaa98b043556/pydantic_ai_slim-0.2.9-py3-none-any.whl", hash = "sha256:d954ff84cb250d7150a7ed694e4f1f92f820205d036ee006d02fce3e62a3bc4e", size = 175019, upload-time = "2025-05-26T07:48:27.326Z" }, + { url = "https://files.pythonhosted.org/packages/b9/df/d95d9420bcd95801407d475db50369814f7fec3cecd3e834796055ffa601/pydantic_ai_slim-1.0.5-py3-none-any.whl", hash = "sha256:4220de1154ae9f2f5818dc622d0659cb1380e4eb251ec2b185d07ace8ea4b78b", size = 324337, upload-time = "2025-09-12T01:24:05.256Z" }, ] [package.optional-dependencies] -a2a = [ - { name = "fasta2a" }, +ag-ui = [ + { name = "ag-ui-protocol" }, + { name = "starlette" }, ] anthropic = [ { name = "anthropic" }, @@ -1059,6 +2045,7 @@ bedrock = [ cli = [ { name = "argcomplete" }, { name = "prompt-toolkit" }, + { name = "pyperclip" }, { name = "rich" }, ] cohere = [ @@ -1073,6 +2060,12 @@ google = [ groq = [ { name = "groq" }, ] +huggingface = [ + { name = "huggingface-hub", extra = ["inference"] }, +] +logfire = [ + { name = "logfire", extra = ["httpx"] }, +] mcp = [ { name = "mcp" }, ] @@ -1082,6 +2075,12 @@ mistral = [ openai = [ { name = "openai" }, ] +retries = [ + { name = "tenacity" }, +] +temporal = [ + { name = "temporalio" }, +] vertexai = [ { name = "google-auth" }, { name = "requests" }, @@ -1096,19 +2095,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, 
@@ -1154,15 +2140,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, @@ -1176,25 +2153,24 @@ wheels = [ [[package]] name = "pydantic-evals" -version = "0.2.9" +version = "1.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, - { name = "eval-type-backport", marker = "python_full_version < '3.11'" }, { name = "logfire-api" }, { name = "pydantic" }, { name = "pydantic-ai-slim" }, { name = "pyyaml" }, { name = "rich" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/7f/4ede6f6642067f4c82a32b87a4f4a2b84120fca218896e311cdb30702e86/pydantic_evals-0.2.9.tar.gz", hash = "sha256:62b00d27391e115416959d6620ee018aa2c3f80bd656edc17026a4ab8152c3df", size = 42397, upload-time = "2025-05-26T07:48:39.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8f/39a94a325a5e93d5a3e8ca84112ce230d49486acac891ec0e6c48f2e91d3/pydantic_evals-1.0.5.tar.gz", hash = "sha256:733ae79baf08894b593a2bce840c27ba57e8f5b5c8fd03e46588e164dae1f3c4", size = 45491, upload-time = "2025-09-12T01:24:19.594Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/6e/8d88e00f624a8348b286b219a292fe3e077ee973660dcff6b4ddd5a04e85/pydantic_evals-0.2.9-py3-none-any.whl", hash = "sha256:62035ae3a5321e4d892c7372ef91af0f46b675863e827f011d5cb8550dede400", size = 51220, upload-time = "2025-05-26T07:48:28.79Z" }, + { url = "https://files.pythonhosted.org/packages/1a/69/8fa916d888b2a97a954d6d2e6bc4d103aa44919bb3d5b12754487abe2308/pydantic_evals-1.0.5-py3-none-any.whl", hash = "sha256:615566c0655a1c8230bd437563fef1bad05f61ed9b5222a9f62e9aa23070697b", size = 54600, upload-time = "2025-09-12T01:24:06.975Z" }, ] [[package]] name = "pydantic-graph" -version = "0.2.9" +version = "1.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -1202,62 +2178,141 @@ dependencies = [ { name = "pydantic" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/b5/29b70b5fd291c6e5d9d66ead152d2571165172edec27d67a03539ae527c4/pydantic_graph-0.2.9.tar.gz", hash = "sha256:52534a2011f53def4797821ad9de9e7862040ee8e3ee4b3b9a5b12d07f3e756e", size = 21838, 
upload-time = "2025-05-26T07:48:40.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/17/f7/e414b085cfb6f0754d734473bf57aaf0355b7714aae1200c5c4288d2ac56/pydantic_graph-1.0.5.tar.gz", hash = "sha256:cb84af6778aef0a35c1eeca3231f619bc2d53dc4c6d4ec4cfd249f940e710ec7", size = 21898, upload-time = "2025-09-12T01:24:20.53Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/cc/e609261763a76f4d23a545afb462847592bc6b4d8eb412990b9b913c073e/pydantic_graph-0.2.9-py3-none-any.whl", hash = "sha256:38ad929a0ec205bd7d5875b0b408d4f13448276aa89b6ce2a1143a7552b070ce", size = 27474, upload-time = "2025-05-26T07:48:30.047Z" }, + { url = "https://files.pythonhosted.org/packages/57/8e/034d9f8effb033bfea6ad69edce2cf3ff5b060481003b6b8997e75cc169e/pydantic_graph-1.0.5-py3-none-any.whl", hash = "sha256:cfd229d0efb241e0f6f0a0c5a7401cc12f439bf5f41cd33351b4c0331e81ac16", size = 27538, upload-time = "2025-09-12T01:24:08.65Z" }, ] [[package]] name = "pydantic-settings" -version = "2.9.1" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" }, + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "pyee" +version = "13.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" }, ] [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[[package]] +name = "pyobjc-core" +version = "11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/e9/0b85c81e2b441267bca707b5d89f56c2f02578ef8f3eafddf0e0c0b8848c/pyobjc_core-11.1.tar.gz", hash = "sha256:b63d4d90c5df7e762f34739b39cc55bc63dbcf9fb2fb3f2671e528488c7a87fe", size = 974602, upload-time = "2025-06-14T20:56:34.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/a7/55afc166d89e3fcd87966f48f8bca3305a3a2d7c62100715b9ffa7153a90/pyobjc_core-11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ec36680b5c14e2f73d432b03ba7c1457dc6ca70fa59fd7daea1073f2b4157d33", size = 671075, upload-time = "2025-06-14T20:44:46.594Z" }, + { url = "https://files.pythonhosted.org/packages/c0/09/e83228e878e73bf756749939f906a872da54488f18d75658afa7f1abbab1/pyobjc_core-11.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:765b97dea6b87ec4612b3212258024d8496ea23517c95a1c5f0735f96b7fd529", size = 677985, upload-time = "2025-06-14T20:44:48.375Z" }, + { url = "https://files.pythonhosted.org/packages/c5/24/12e4e2dae5f85fd0c0b696404ed3374ea6ca398e7db886d4f1322eb30799/pyobjc_core-11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:18986f83998fbd5d3f56d8a8428b2f3e0754fd15cef3ef786ca0d29619024f2c", size = 676431, upload-time = "2025-06-14T20:44:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/f7/79/031492497624de4c728f1857181b06ce8c56444db4d49418fa459cba217c/pyobjc_core-11.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:8849e78cfe6595c4911fbba29683decfb0bf57a350aed8a43316976ba6f659d2", size = 719330, upload-time = "2025-06-14T20:44:51.621Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7d/6169f16a0c7ec15b9381f8bf33872baf912de2ef68d96c798ca4c6ee641f/pyobjc_core-11.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8cb9ed17a8d84a312a6e8b665dd22393d48336ea1d8277e7ad20c19a38edf731", size = 667203, upload-time = "2025-06-14T20:44:53.262Z" }, + { url = "https://files.pythonhosted.org/packages/49/0f/f5ab2b0e57430a3bec9a62b6153c0e79c05a30d77b564efdb9f9446eeac5/pyobjc_core-11.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:f2455683e807f8541f0d83fbba0f5d9a46128ab0d5cc83ea208f0bec759b7f96", size = 708807, upload-time = "2025-06-14T20:44:54.851Z" }, +] + +[[package]] +name = "pyobjc-framework-cocoa" +version = "11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyobjc-core" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4b/c5/7a866d24bc026f79239b74d05e2cf3088b03263da66d53d1b4cf5207f5ae/pyobjc_framework_cocoa-11.1.tar.gz", hash = "sha256:87df76b9b73e7ca699a828ff112564b59251bb9bbe72e610e670a4dc9940d038", size = 5565335, upload-time = "2025-06-14T20:56:59.683Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/43/6841046aa4e257b6276cd23e53cacedfb842ecaf3386bb360fa9cc319aa1/pyobjc_framework_cocoa-11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b9a9b8ba07f5bf84866399e3de2aa311ed1c34d5d2788a995bdbe82cc36cfa0", size = 388177, upload-time = "2025-06-14T20:46:51.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/da/41c0f7edc92ead461cced7e67813e27fa17da3c5da428afdb4086c69d7ba/pyobjc_framework_cocoa-11.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:806de56f06dfba8f301a244cce289d54877c36b4b19818e3b53150eb7c2424d0", size = 388983, upload-time = "2025-06-14T20:46:52.591Z" }, + { url = "https://files.pythonhosted.org/packages/4e/0b/a01477cde2a040f97e226f3e15e5ffd1268fcb6d1d664885a95ba592eca9/pyobjc_framework_cocoa-11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:54e93e1d9b0fc41c032582a6f0834befe1d418d73893968f3f450281b11603da", size = 389049, upload-time = "2025-06-14T20:46:53.757Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/64cf2661f6ab7c124d0486ec6d1d01a9bb2838a0d2a46006457d8c5e6845/pyobjc_framework_cocoa-11.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:fd5245ee1997d93e78b72703be1289d75d88ff6490af94462b564892e9266350", size = 393110, upload-time = "2025-06-14T20:46:54.894Z" }, + { url = "https://files.pythonhosted.org/packages/33/87/01e35c5a3c5bbdc93d5925366421e10835fcd7b23347b6c267df1b16d0b3/pyobjc_framework_cocoa-11.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:aede53a1afc5433e1e7d66568cc52acceeb171b0a6005407a42e8e82580b4fc0", size = 392644, upload-time = "2025-06-14T20:46:56.503Z" }, + { url = "https://files.pythonhosted.org/packages/c1/7c/54afe9ffee547c41e1161691e72067a37ed27466ac71c089bfdcd07ca70d/pyobjc_framework_cocoa-11.1-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:1b5de4e1757bb65689d6dc1f8d8717de9ec8587eb0c4831c134f13aba29f9b71", size = 396742, upload-time = "2025-06-14T20:46:57.64Z" }, +] + +[[package]] +name = "pyperclip" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/99/25f4898cf420efb6f45f519de018f4faea5391114a8618b16736ef3029f1/pyperclip-1.10.0.tar.gz", hash = "sha256:180c8346b1186921c75dfd14d9048a6b5d46bfc499778811952c6dd6eb1ca6be", size = 12193, upload-time = "2025-09-18T00:54:00.384Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/bc/22540e73c5f5ae18f02924cd3954a6c9a4aa6b713c841a94c98335d333a1/pyperclip-1.10.0-py3-none-any.whl", hash = "sha256:596fbe55dc59263bff26e61d2afbe10223e2fccb5210c9c96a28d6887cfcc7ec", size = 11062, upload-time = "2025-09-18T00:53:59.252Z" }, +] + +[[package]] +name = "pyrate-limiter" +version = "3.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/da/f682c5c5f9f0a5414363eb4397e6b07d84a02cde69c4ceadcbf32c85537c/pyrate_limiter-3.9.0.tar.gz", hash = "sha256:6b882e2c77cda07a241d3730975daea4258344b39c878f1dd8849df73f70b0ce", size = 289308, upload-time = "2025-07-30T14:36:58.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/af/d8bf0959ece9bc4679bd203908c31019556a421d76d8143b0c6871c7f614/pyrate_limiter-3.9.0-py3-none-any.whl", 
hash = "sha256:77357840c8cf97a36d67005d4e090787043f54000c12c2b414ff65657653e378", size = 33628, upload-time = "2025-07-30T14:36:57.71Z" }, +] + +[[package]] +name = "pysocks" +version = "1.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload-time = "2019-09-20T02:07:35.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, + { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" }, ] [[package]] name = "pytest" -version = "8.3.5" +version = "8.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, ] [[package]] name = "pytest-cov" -version = "6.1.1" +version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/69/5f1e57f6c5a39f81411b550027bf72842c4567ff5fd572bed1edc9e4b5d9/pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", size = 66857, upload-time = "2025-04-05T14:07:51.592Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/d0/def53b4a790cfb21483016430ed828f64830dd981ebe1089971cd10cab25/pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde", size = 23841, upload-time = "2025-04-05T14:07:49.641Z" }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] [[package]] @@ -1274,11 +2329,11 @@ wheels = [ [[package]] name = "python-dotenv" -version = "1.1.0" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] [[package]] @@ -1290,21 +2345,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, 
upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, @@ -1334,9 +2399,104 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] +[[package]] +name = "rapidfuzz" +version = "3.14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/fc/a98b616db9a42dcdda7c78c76bdfdf6fe290ac4c5ffbb186f73ec981ad5b/rapidfuzz-3.14.1.tar.gz", hash = "sha256:b02850e7f7152bd1edff27e9d584505b84968cacedee7a734ec4050c655a803c", size = 57869570, upload-time = "2025-09-08T21:08:15.922Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/c7/c3c860d512606225c11c8ee455b4dc0b0214dbcfac90a2c22dddf55320f3/rapidfuzz-3.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4d976701060886a791c8a9260b1d4139d14c1f1e9a6ab6116b45a1acf3baff67", size = 1938398, upload-time = "2025-09-08T21:05:44.031Z" }, + { url = "https://files.pythonhosted.org/packages/c0/f3/67f5c5cd4d728993c48c1dcb5da54338d77c03c34b4903cc7839a3b89faf/rapidfuzz-3.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e6ba7e6eb2ab03870dcab441d707513db0b4264c12fba7b703e90e8b4296df2", size = 1392819, upload-time = "2025-09-08T21:05:45.549Z" }, + { url = "https://files.pythonhosted.org/packages/d5/06/400d44842f4603ce1bebeaeabe776f510e329e7dbf6c71b6f2805e377889/rapidfuzz-3.14.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e532bf46de5fd3a1efde73a16a4d231d011bce401c72abe3c6ecf9de681003f", size = 1391798, upload-time = "2025-09-08T21:05:47.044Z" }, + { url = "https://files.pythonhosted.org/packages/90/97/a6944955713b47d88e8ca4305ca7484940d808c4e6c4e28b6fa0fcbff97e/rapidfuzz-3.14.1-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f9b6a6fb8ed9b951e5f3b82c1ce6b1665308ec1a0da87f799b16e24fc59e4662", size = 1699136, upload-time = "2025-09-08T21:05:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/a8/1e/f311a5c95ddf922db6dd8666efeceb9ac69e1319ed098ac80068a4041732/rapidfuzz-3.14.1-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b6ac3f9810949caef0e63380b11a3c32a92f26bacb9ced5e32c33560fcdf8d1", size = 2236238, upload-time = "2025-09-08T21:05:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/85/27/e14e9830255db8a99200f7111b158ddef04372cf6332a415d053fe57cc9c/rapidfuzz-3.14.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e52e4c34fd567f77513e886b66029c1ae02f094380d10eba18ba1c68a46d8b90", size = 3183685, upload-time = "2025-09-08T21:05:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/61/b2/42850c9616ddd2887904e5dd5377912cbabe2776fdc9fd4b25e6e12fba32/rapidfuzz-3.14.1-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2ef72e41b1a110149f25b14637f1cedea6df192462120bea3433980fe9d8ac05", size = 1231523, upload-time = "2025-09-08T21:05:53.927Z" }, + { url = "https://files.pythonhosted.org/packages/de/b5/6b90ed7127a1732efef39db46dd0afc911f979f215b371c325a2eca9cb15/rapidfuzz-3.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fb654a35b373d712a6b0aa2a496b2b5cdd9d32410cfbaecc402d7424a90ba72a", size = 2415209, upload-time = "2025-09-08T21:05:55.422Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/af51c50d238c82f2179edc4b9f799cc5a50c2c0ebebdcfaa97ded7d02978/rapidfuzz-3.14.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:2b2c12e5b9eb8fe9a51b92fe69e9ca362c0970e960268188a6d295e1dec91e6d", size = 2532957, upload-time = "2025-09-08T21:05:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/50/92/29811d2ba7c984251a342c4f9ccc7cc4aa09d43d800af71510cd51c36453/rapidfuzz-3.14.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4f069dec5c450bd987481e752f0a9979e8fdf8e21e5307f5058f5c4bb162fa56", size = 2815720, upload-time = "2025-09-08T21:05:58.618Z" }, + { url = "https://files.pythonhosted.org/packages/78/69/cedcdee16a49e49d4985eab73b59447f211736c5953a58f1b91b6c53a73f/rapidfuzz-3.14.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4d0d9163725b7ad37a8c46988cae9ebab255984db95ad01bf1987ceb9e3058dd", size = 3323704, upload-time = "2025-09-08T21:06:00.576Z" }, + { url = "https://files.pythonhosted.org/packages/76/3e/5a3f9a5540f18e0126e36f86ecf600145344acb202d94b63ee45211a18b8/rapidfuzz-3.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db656884b20b213d846f6bc990c053d1f4a60e6d4357f7211775b02092784ca1", size = 4287341, upload-time = "2025-09-08T21:06:02.301Z" }, + { url = "https://files.pythonhosted.org/packages/46/26/45db59195929dde5832852c9de8533b2ac97dcc0d852d1f18aca33828122/rapidfuzz-3.14.1-cp311-cp311-win32.whl", hash = "sha256:4b42f7b9c58cbcfbfaddc5a6278b4ca3b6cd8983e7fd6af70ca791dff7105fb9", size = 1726574, upload-time = "2025-09-08T21:06:04.357Z" }, + { url = "https://files.pythonhosted.org/packages/01/5c/a4caf76535f35fceab25b2aaaed0baecf15b3d1fd40746f71985d20f8c4b/rapidfuzz-3.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:e5847f30d7d4edefe0cb37294d956d3495dd127c1c56e9128af3c2258a520bb4", size = 1547124, upload-time = "2025-09-08T21:06:06.002Z" }, + { url = "https://files.pythonhosted.org/packages/c6/66/aa93b52f95a314584d71fa0b76df00bdd4158aafffa76a350f1ae416396c/rapidfuzz-3.14.1-cp311-cp311-win_arm64.whl", hash = "sha256:5087d8ad453092d80c042a08919b1cb20c8ad6047d772dc9312acd834da00f75", size = 816958, upload-time = "2025-09-08T21:06:07.509Z" }, + { url = "https://files.pythonhosted.org/packages/df/77/2f4887c9b786f203e50b816c1cde71f96642f194e6fa752acfa042cf53fd/rapidfuzz-3.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:809515194f628004aac1b1b280c3734c5ea0ccbd45938c9c9656a23ae8b8f553", size = 1932216, upload-time = "2025-09-08T21:06:09.342Z" }, + { url = "https://files.pythonhosted.org/packages/de/bd/b5e445d156cb1c2a87d36d8da53daf4d2a1d1729b4851660017898b49aa0/rapidfuzz-3.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0afcf2d6cb633d0d4260d8df6a40de2d9c93e9546e2c6b317ab03f89aa120ad7", size = 1393414, upload-time = "2025-09-08T21:06:10.959Z" }, + { url = "https://files.pythonhosted.org/packages/de/bd/98d065dd0a4479a635df855616980eaae1a1a07a876db9400d421b5b6371/rapidfuzz-3.14.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1c3d07d53dcafee10599da8988d2b1f39df236aee501ecbd617bd883454fcd", size = 1377194, upload-time = "2025-09-08T21:06:12.471Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/1265547b771128b686f3c431377ff1db2fa073397ed082a25998a7b06d4e/rapidfuzz-3.14.1-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6e9ee3e1eb0a027717ee72fe34dc9ac5b3e58119f1bd8dd15bc19ed54ae3e62b", size = 1669573, upload-time = "2025-09-08T21:06:14.016Z" }, + { url = "https://files.pythonhosted.org/packages/a8/57/e73755c52fb451f2054196404ccc468577f8da023b3a48c80bce29ee5d4a/rapidfuzz-3.14.1-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:70c845b64a033a20c44ed26bc890eeb851215148cc3e696499f5f65529afb6cb", size = 2217833, upload-time = "2025-09-08T21:06:15.666Z" }, + { url = "https://files.pythonhosted.org/packages/20/14/7399c18c460e72d1b754e80dafc9f65cb42a46cc8f29cd57d11c0c4acc94/rapidfuzz-3.14.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26db0e815213d04234298dea0d884d92b9cb8d4ba954cab7cf67a35853128a33", size = 3159012, upload-time = "2025-09-08T21:06:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5e/24f0226ddb5440cabd88605d2491f99ae3748a6b27b0bc9703772892ced7/rapidfuzz-3.14.1-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:6ad3395a416f8b126ff11c788531f157c7debeb626f9d897c153ff8980da10fb", size = 1227032, upload-time = "2025-09-08T21:06:21.06Z" }, + { url = "https://files.pythonhosted.org/packages/40/43/1d54a4ad1a5fac2394d5f28a3108e2bf73c26f4f23663535e3139cfede9b/rapidfuzz-3.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:61c5b9ab6f730e6478aa2def566223712d121c6f69a94c7cc002044799442afd", size = 2395054, upload-time = "2025-09-08T21:06:23.482Z" }, + { url = "https://files.pythonhosted.org/packages/0c/71/e9864cd5b0f086c4a03791f5dfe0155a1b132f789fe19b0c76fbabd20513/rapidfuzz-3.14.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:13e0ea3d0c533969158727d1bb7a08c2cc9a816ab83f8f0dcfde7e38938ce3e6", size = 2524741, upload-time = "2025-09-08T21:06:26.825Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0c/53f88286b912faf4a3b2619a60df4f4a67bd0edcf5970d7b0c1143501f0c/rapidfuzz-3.14.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6325ca435b99f4001aac919ab8922ac464999b100173317defb83eae34e82139", size = 2785311, upload-time = "2025-09-08T21:06:29.471Z" }, + { url = "https://files.pythonhosted.org/packages/53/9a/229c26dc4f91bad323f07304ee5ccbc28f0d21c76047a1e4f813187d0bad/rapidfuzz-3.14.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:07a9fad3247e68798424bdc116c1094e88ecfabc17b29edf42a777520347648e", size = 3303630, upload-time = "2025-09-08T21:06:31.094Z" }, + { url = "https://files.pythonhosted.org/packages/05/de/20e330d6d58cbf83da914accd9e303048b7abae2f198886f65a344b69695/rapidfuzz-3.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8ff5dbe78db0a10c1f916368e21d328935896240f71f721e073cf6c4c8cdedd", size = 4262364, upload-time = "2025-09-08T21:06:32.877Z" }, + { url = "https://files.pythonhosted.org/packages/1f/10/2327f83fad3534a8d69fe9cd718f645ec1fe828b60c0e0e97efc03bf12f8/rapidfuzz-3.14.1-cp312-cp312-win32.whl", hash = "sha256:9c83270e44a6ae7a39fc1d7e72a27486bccc1fa5f34e01572b1b90b019e6b566", size = 1711927, upload-time = "2025-09-08T21:06:34.669Z" }, + { url = "https://files.pythonhosted.org/packages/78/8d/199df0370133fe9f35bc72f3c037b53c93c5c1fc1e8d915cf7c1f6bb8557/rapidfuzz-3.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:e06664c7fdb51c708e082df08a6888fce4c5c416d7e3cc2fa66dd80eb76a149d", size = 1542045, upload-time = "2025-09-08T21:06:36.364Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c6/cc5d4bd1b16ea2657c80b745d8b1c788041a31fad52e7681496197b41562/rapidfuzz-3.14.1-cp312-cp312-win_arm64.whl", hash = "sha256:6c7c26025f7934a169a23dafea6807cfc3fb556f1dd49229faf2171e5d8101cc", size = 813170, upload-time = "2025-09-08T21:06:38.001Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f2/0024cc8eead108c4c29337abe133d72ddf3406ce9bbfbcfc110414a7ea07/rapidfuzz-3.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8d69f470d63ee824132ecd80b1974e1d15dd9df5193916901d7860cef081a260", size = 1926515, 
upload-time = "2025-09-08T21:06:39.834Z" }, + { url = "https://files.pythonhosted.org/packages/12/ae/6cb211f8930bea20fa989b23f31ee7f92940caaf24e3e510d242a1b28de4/rapidfuzz-3.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6f571d20152fc4833b7b5e781b36d5e4f31f3b5a596a3d53cf66a1bd4436b4f4", size = 1388431, upload-time = "2025-09-08T21:06:41.73Z" }, + { url = "https://files.pythonhosted.org/packages/39/88/bfec24da0607c39e5841ced5594ea1b907d20f83adf0e3ee87fa454a425b/rapidfuzz-3.14.1-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61d77e09b2b6bc38228f53b9ea7972a00722a14a6048be9a3672fb5cb08bad3a", size = 1375664, upload-time = "2025-09-08T21:06:43.737Z" }, + { url = "https://files.pythonhosted.org/packages/f4/43/9f282ba539e404bdd7052c7371d3aaaa1a9417979d2a1d8332670c7f385a/rapidfuzz-3.14.1-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8b41d95ef86a6295d353dc3bb6c80550665ba2c3bef3a9feab46074d12a9af8f", size = 1668113, upload-time = "2025-09-08T21:06:45.758Z" }, + { url = "https://files.pythonhosted.org/packages/7f/2f/0b3153053b1acca90969eb0867922ac8515b1a8a48706a3215c2db60e87c/rapidfuzz-3.14.1-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0591df2e856ad583644b40a2b99fb522f93543c65e64b771241dda6d1cfdc96b", size = 2212875, upload-time = "2025-09-08T21:06:47.447Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9b/623001dddc518afaa08ed1fbbfc4005c8692b7a32b0f08b20c506f17a770/rapidfuzz-3.14.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f277801f55b2f3923ef2de51ab94689a0671a4524bf7b611de979f308a54cd6f", size = 3161181, upload-time = "2025-09-08T21:06:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b7/d8404ed5ad56eb74463e5ebf0a14f0019d7eb0e65e0323f709fe72e0884c/rapidfuzz-3.14.1-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:893fdfd4f66ebb67f33da89eb1bd1674b7b30442fdee84db87f6cb9074bf0ce9", size = 1225495, upload-time = "2025-09-08T21:06:51.056Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6c/b96af62bc7615d821e3f6b47563c265fd7379d7236dfbc1cbbcce8beb1d2/rapidfuzz-3.14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fe2651258c1f1afa9b66f44bf82f639d5f83034f9804877a1bbbae2120539ad1", size = 2396294, upload-time = "2025-09-08T21:06:53.063Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b7/c60c9d22a7debed8b8b751f506a4cece5c22c0b05e47a819d6b47bc8c14e/rapidfuzz-3.14.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ace21f7a78519d8e889b1240489cd021c5355c496cb151b479b741a4c27f0a25", size = 2529629, upload-time = "2025-09-08T21:06:55.188Z" }, + { url = "https://files.pythonhosted.org/packages/25/94/a9ec7ccb28381f14de696ffd51c321974762f137679df986f5375d35264f/rapidfuzz-3.14.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cb5acf24590bc5e57027283b015950d713f9e4d155fda5cfa71adef3b3a84502", size = 2782960, upload-time = "2025-09-08T21:06:57.339Z" }, + { url = "https://files.pythonhosted.org/packages/68/80/04e5276d223060eca45250dbf79ea39940c0be8b3083661d58d57572c2c5/rapidfuzz-3.14.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:67ea46fa8cc78174bad09d66b9a4b98d3068e85de677e3c71ed931a1de28171f", size = 3298427, upload-time = "2025-09-08T21:06:59.319Z" }, + { url = "https://files.pythonhosted.org/packages/4a/63/24759b2a751562630b244e68ccaaf7a7525c720588fcc77c964146355aee/rapidfuzz-3.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:44e741d785de57d1a7bae03599c1cbc7335d0b060a35e60c44c382566e22782e", 
size = 4267736, upload-time = "2025-09-08T21:07:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/18/a4/73f1b1f7f44d55f40ffbffe85e529eb9d7e7f7b2ffc0931760eadd163995/rapidfuzz-3.14.1-cp313-cp313-win32.whl", hash = "sha256:b1fe6001baa9fa36bcb565e24e88830718f6c90896b91ceffcb48881e3adddbc", size = 1710515, upload-time = "2025-09-08T21:07:03.16Z" }, + { url = "https://files.pythonhosted.org/packages/6a/8b/a8fe5a6ee4d06fd413aaa9a7e0a23a8630c4b18501509d053646d18c2aa7/rapidfuzz-3.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:83b8cc6336709fa5db0579189bfd125df280a554af544b2dc1c7da9cdad7e44d", size = 1540081, upload-time = "2025-09-08T21:07:05.401Z" }, + { url = "https://files.pythonhosted.org/packages/ac/fe/4b0ac16c118a2367d85450b45251ee5362661e9118a1cef88aae1765ffff/rapidfuzz-3.14.1-cp313-cp313-win_arm64.whl", hash = "sha256:cf75769662eadf5f9bd24e865c19e5ca7718e879273dce4e7b3b5824c4da0eb4", size = 812725, upload-time = "2025-09-08T21:07:07.148Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cb/1ad9a76d974d153783f8e0be8dbe60ec46488fac6e519db804e299e0da06/rapidfuzz-3.14.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d937dbeda71c921ef6537c6d41a84f1b8112f107589c9977059de57a1d726dd6", size = 1945173, upload-time = "2025-09-08T21:07:08.893Z" }, + { url = "https://files.pythonhosted.org/packages/d9/61/959ed7460941d8a81cbf6552b9c45564778a36cf5e5aa872558b30fc02b2/rapidfuzz-3.14.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7a2d80cc1a4fcc7e259ed4f505e70b36433a63fa251f1bb69ff279fe376c5efd", size = 1413949, upload-time = "2025-09-08T21:07:11.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a0/f46fca44457ca1f25f23cc1f06867454fc3c3be118cd10b552b0ab3e58a2/rapidfuzz-3.14.1-cp313-cp313t-win32.whl", hash = "sha256:40875e0c06f1a388f1cab3885744f847b557e0b1642dfc31ff02039f9f0823ef", size = 1760666, upload-time = "2025-09-08T21:07:12.884Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d0/7a5d9c04446f8b66882b0fae45b36a838cf4d31439b5d1ab48a9d17c8e57/rapidfuzz-3.14.1-cp313-cp313t-win_amd64.whl", hash = "sha256:876dc0c15552f3d704d7fb8d61bdffc872ff63bedf683568d6faad32e51bbce8", size = 1579760, upload-time = "2025-09-08T21:07:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/4e/aa/2c03ae112320d0746f2c869cae68c413f3fe3b6403358556f2b747559723/rapidfuzz-3.14.1-cp313-cp313t-win_arm64.whl", hash = "sha256:61458e83b0b3e2abc3391d0953c47d6325e506ba44d6a25c869c4401b3bc222c", size = 832088, upload-time = "2025-09-08T21:07:17.03Z" }, + { url = "https://files.pythonhosted.org/packages/d6/36/53debca45fbe693bd6181fb05b6a2fd561c87669edb82ec0d7c1961a43f0/rapidfuzz-3.14.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e84d9a844dc2e4d5c4cabd14c096374ead006583304333c14a6fbde51f612a44", size = 1926336, upload-time = "2025-09-08T21:07:18.809Z" }, + { url = "https://files.pythonhosted.org/packages/ae/32/b874f48609665fcfeaf16cbaeb2bbc210deef2b88e996c51cfc36c3eb7c3/rapidfuzz-3.14.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:40301b93b99350edcd02dbb22e37ca5f2a75d0db822e9b3c522da451a93d6f27", size = 1389653, upload-time = "2025-09-08T21:07:20.667Z" }, + { url = "https://files.pythonhosted.org/packages/97/25/f6c5a1ff4ec11edadacb270e70b8415f51fa2f0d5730c2c552b81651fbe3/rapidfuzz-3.14.1-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fedd5097a44808dddf341466866e5c57a18a19a336565b4ff50aa8f09eb528f6", size = 1380911, upload-time = "2025-09-08T21:07:22.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/f3/d322202ef8fab463759b51ebfaa33228100510c82e6153bd7a922e150270/rapidfuzz-3.14.1-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e3e61c9e80d8c26709d8aa5c51fdd25139c81a4ab463895f8a567f8347b0548", size = 1673515, upload-time = "2025-09-08T21:07:24.417Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b9/6b2a97f4c6be96cac3749f32301b8cdf751ce5617b1c8934c96586a0662b/rapidfuzz-3.14.1-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da011a373722fac6e64687297a1d17dc8461b82cb12c437845d5a5b161bc24b9", size = 2219394, upload-time = "2025-09-08T21:07:26.402Z" }, + { url = "https://files.pythonhosted.org/packages/11/bf/afb76adffe4406e6250f14ce48e60a7eb05d4624945bd3c044cfda575fbc/rapidfuzz-3.14.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5967d571243cfb9ad3710e6e628ab68c421a237b76e24a67ac22ee0ff12784d6", size = 3163582, upload-time = "2025-09-08T21:07:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/e6405227560f61e956cb4c5de653b0f874751c5ada658d3532d6c1df328e/rapidfuzz-3.14.1-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:474f416cbb9099676de54aa41944c154ba8d25033ee460f87bb23e54af6d01c9", size = 1221116, upload-time = "2025-09-08T21:07:30.8Z" }, + { url = "https://files.pythonhosted.org/packages/55/e6/5b757e2e18de384b11d1daf59608453f0baf5d5d8d1c43e1a964af4dc19a/rapidfuzz-3.14.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ae2d57464b59297f727c4e201ea99ec7b13935f1f056c753e8103da3f2fc2404", size = 2402670, upload-time = "2025-09-08T21:07:32.702Z" }, + { url = "https://files.pythonhosted.org/packages/43/c4/d753a415fe54531aa882e288db5ed77daaa72e05c1a39e1cbac00d23024f/rapidfuzz-3.14.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:57047493a1f62f11354c7143c380b02f1b355c52733e6b03adb1cb0fe8fb8816", size = 2521659, upload-time = "2025-09-08T21:07:35.218Z" }, + { url = "https://files.pythonhosted.org/packages/cd/28/d4e7fe1515430db98f42deb794c7586a026d302fe70f0216b638d89cf10f/rapidfuzz-3.14.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:4acc20776f225ee37d69517a237c090b9fa7e0836a0b8bc58868e9168ba6ef6f", size = 2788552, upload-time = "2025-09-08T21:07:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/eab05473af7a2cafb4f3994bc6bf408126b8eec99a569aac6254ac757db4/rapidfuzz-3.14.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4373f914ff524ee0146919dea96a40a8200ab157e5a15e777a74a769f73d8a4a", size = 3306261, upload-time = "2025-09-08T21:07:39.624Z" }, + { url = "https://files.pythonhosted.org/packages/d1/31/2feb8dfcfcff6508230cd2ccfdde7a8bf988c6fda142fe9ce5d3eb15704d/rapidfuzz-3.14.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:37017b84953927807847016620d61251fe236bd4bcb25e27b6133d955bb9cafb", size = 4269522, upload-time = "2025-09-08T21:07:41.663Z" }, + { url = "https://files.pythonhosted.org/packages/a3/99/250538d73c8fbab60597c3d131a11ef2a634d38b44296ca11922794491ac/rapidfuzz-3.14.1-cp314-cp314-win32.whl", hash = "sha256:c8d1dd1146539e093b84d0805e8951475644af794ace81d957ca612e3eb31598", size = 1745018, upload-time = "2025-09-08T21:07:44.313Z" }, + { url = "https://files.pythonhosted.org/packages/c5/15/d50839d20ad0743aded25b08a98ffb872f4bfda4e310bac6c111fcf6ea1f/rapidfuzz-3.14.1-cp314-cp314-win_amd64.whl", hash = "sha256:f51c7571295ea97387bac4f048d73cecce51222be78ed808263b45c79c40a440", size = 1587666, upload-time = "2025-09-08T21:07:46.917Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/ff/d73fec989213fb6f0b6f15ee4bbdf2d88b0686197951a06b036111cd1c7d/rapidfuzz-3.14.1-cp314-cp314-win_arm64.whl", hash = "sha256:01eab10ec90912d7d28b3f08f6c91adbaf93458a53f849ff70776ecd70dd7a7a", size = 835780, upload-time = "2025-09-08T21:07:49.256Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e7/f0a242687143cebd33a1fb165226b73bd9496d47c5acfad93de820a18fa8/rapidfuzz-3.14.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:60879fcae2f7618403c4c746a9a3eec89327d73148fb6e89a933b78442ff0669", size = 1945182, upload-time = "2025-09-08T21:07:51.84Z" }, + { url = "https://files.pythonhosted.org/packages/96/29/ca8a3f8525e3d0e7ab49cb927b5fb4a54855f794c9ecd0a0b60a6c96a05f/rapidfuzz-3.14.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f94d61e44db3fc95a74006a394257af90fa6e826c900a501d749979ff495d702", size = 1413946, upload-time = "2025-09-08T21:07:53.702Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ef/6fd10aa028db19c05b4ac7fe77f5613e4719377f630c709d89d7a538eea2/rapidfuzz-3.14.1-cp314-cp314t-win32.whl", hash = "sha256:93b6294a3ffab32a9b5f9b5ca048fa0474998e7e8bb0f2d2b5e819c64cb71ec7", size = 1795851, upload-time = "2025-09-08T21:07:55.76Z" }, + { url = "https://files.pythonhosted.org/packages/e4/30/acd29ebd906a50f9e0f27d5f82a48cf5e8854637b21489bd81a2459985cf/rapidfuzz-3.14.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6cb56b695421538fdbe2c0c85888b991d833b8637d2f2b41faa79cea7234c000", size = 1626748, upload-time = "2025-09-08T21:07:58.166Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f4/dfc7b8c46b1044a47f7ca55deceb5965985cff3193906cb32913121e6652/rapidfuzz-3.14.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7cd312c380d3ce9d35c3ec9726b75eee9da50e8a38e89e229a03db2262d3d96b", size = 853771, upload-time = "2025-09-08T21:08:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/05/c7/1b17347e30f2b50dd976c54641aa12003569acb1bdaabf45a5cc6f471c58/rapidfuzz-3.14.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4a21ccdf1bd7d57a1009030527ba8fae1c74bf832d0a08f6b67de8f5c506c96f", size = 1862602, upload-time = "2025-09-08T21:08:09.088Z" }, + { url = "https://files.pythonhosted.org/packages/09/cf/95d0dacac77eda22499991bd5f304c77c5965fb27348019a48ec3fe4a3f6/rapidfuzz-3.14.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:589fb0af91d3aff318750539c832ea1100dbac2c842fde24e42261df443845f6", size = 1339548, upload-time = "2025-09-08T21:08:11.059Z" }, + { url = "https://files.pythonhosted.org/packages/b6/58/f515c44ba8c6fa5daa35134b94b99661ced852628c5505ead07b905c3fc7/rapidfuzz-3.14.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a4f18092db4825f2517d135445015b40033ed809a41754918a03ef062abe88a0", size = 1513859, upload-time = "2025-09-08T21:08:13.07Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = 
"sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + [[package]] name = "requests" -version = "2.32.3" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1344,23 +2504,152 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] name = "rich" -version = "14.0.0" +version = "14.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/fe/75/af448d8e52bf1d8fa6a9d089ca6c07ff4453d86c65c145d0a300bb073b9b/rich-14.1.0.tar.gz", hash = "sha256:e497a48b844b0320d45007cdebfeaeed8db2a4f4bcf49f15e455cfc4af11eaa8", size = 224441, upload-time = "2025-07-25T07:32:58.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/30/3c4d035596d3cf444529e0b2953ad0466f6049528a879d27534700580395/rich-14.1.0-py3-none-any.whl", hash = "sha256:536f5f1785986d6dbdea3c75205c473f970777b4a0d6c6dd1b696aa05a3fa04f", size = 243368, upload-time = "2025-07-25T07:32:56.73Z" }, +] + +[[package]] +name = "ripgrep" +version = "14.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/27/53554c9307bc0647f123d4bf776a0f4d6a3083fb846e4f4abf999a29f220/ripgrep-14.1.0.tar.gz", hash = "sha256:17c866fdee1bf9e1c92ed1057bfd5f253c428ba73145553b59cbef8b4db6fca1", size = 464782, upload-time = "2024-08-10T21:47:35.637Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/18/f8/57521f4467167a19a32dcd6715cb6d912fa975dfcffe028f832a7a848592/ripgrep-14.1.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b542bf6da4aa2090665f7bee4760748500fc186b3ff7f4c32acd5790b40f7cd6", size = 2197631, upload-time = "2024-08-10T21:47:25.392Z" }, + { url = "https://files.pythonhosted.org/packages/a8/79/076193bfa1c5f2a955b887d7cc5dd3ec91f7ea2097a06b7e92e4ebcfb2ae/ripgrep-14.1.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4a01dbbfd98e13947a78cce80ef3d10e42b74563b42e160d6620a7429e50e779", size = 1949822, upload-time = "2024-08-10T21:33:53.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7d/0afdb9e8ff73ce1af3f3158fb7c88dde4247c60e23743b8e6c94e5ad55ad/ripgrep-14.1.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80404533ad72f4436030fcd84d49c1ba1e915d272465887ce1f94f4c65f351d9", size = 6896094, upload-time = "2024-08-10T21:47:13.246Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/b0984433dde43f8d4aa1634ec8f139e97794371e0b0eb4f42a2edeeda0df/ripgrep-14.1.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e73652f3609cf9fe99e0b181979fe3a5c7726b7f8992cba5d452aae4dca82ecd", size = 6676979, upload-time = "2024-08-10T21:47:15.466Z" }, + { url = "https://files.pythonhosted.org/packages/f6/15/fa99f30708c411ea15735872619e433246336fd9d1338ca7d7f63a994983/ripgrep-14.1.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a26a70bd3103984e855db748d1725d3e97ae896e84db93092816f62eab052b12", size = 6872870, upload-time = "2024-08-10T21:47:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/db/7e/0b85e5a4093885ba80b97054cdb3704bfd3f9af7194e5b052aa7674f5d27/ripgrep-14.1.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21deeafdbc537172a293d2978cfbe31cfcf0c65b66cf1fec11b14fd6860cfae3", size = 6878992, upload-time = "2024-08-10T21:47:17.562Z" }, + { url = "https://files.pythonhosted.org/packages/19/1a/fe85d13eacd4c9af23e1b786bef894e8e236cf4bdfefaf8909a28fdd524e/ripgrep-14.1.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:631a217d7093c5da1917b8e2c4bf71ad00bba2537d0c88a24ec28a6bc450444e", size = 8160851, upload-time = "2024-08-10T21:47:19.427Z" }, + { url = "https://files.pythonhosted.org/packages/54/e1/26a4e53e3d56d873c03d62253a11fe8042b92878fc27b161a15f7b46c2df/ripgrep-14.1.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2048f2b64a0bfe8c425df0dea6729d9795f2d8df6cda77bf76cf718439c41453", size = 6851971, upload-time = "2024-08-10T21:47:23.268Z" }, + { url = "https://files.pythonhosted.org/packages/10/d8/890eb71d464d8de0dc0dcf7ca42b1b59238c0187ac199ce56dd3cfd6c1ea/ripgrep-14.1.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:62a81311061660d7d3dd6ed99c699d09028186aaa1e26b436052f77c0925ea41", size = 9094460, upload-time = "2024-08-10T21:47:27.246Z" }, + { url = "https://files.pythonhosted.org/packages/cb/15/8dec67f2e484593b18efcc9cd5a70188ed5bfb1f0b0beb73c1be6e325156/ripgrep-14.1.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b3e49ee6548e9175cb15535b28c582d756272d4c9cc902fd5e326a00cb69737a", size = 6864721, upload-time = "2024-08-10T21:47:29.813Z" }, + { url = "https://files.pythonhosted.org/packages/da/6d/c2006b112435a1fbcb3c310bdaec82bf14afac7fc862b665f17f09b182c8/ripgrep-14.1.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c380549562662316d10fb1712856ed13b48d24d1b9d3c69d20aab610536cf5ab", size = 6959572, upload-time = "2024-08-10T21:47:31.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/63/8819227b1550e48df73cc35e24310a5c380da897d7acffbf534281c88ed6/ripgrep-14.1.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d20c74dfa4b1085712ffc6528eb10cdccf4022050539053a5f9203f3959b34e0", size = 8950227, upload-time = "2024-08-10T21:47:33.527Z" }, + { url = "https://files.pythonhosted.org/packages/1c/36/364b596290b70a41e85bf9f9720cf169aa792845fc9f0b1d3d2be3a58755/ripgrep-14.1.0-py3-none-win32.whl", hash = "sha256:1fe90507ea2f8a08c1b462043062d81800297a953dc58e25b1b28a3d9d505394", size = 1616108, upload-time = "2024-08-10T21:47:39.198Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a2/acde2fc0e343d2d750a3d0c64e96b30421cbf7e9474334dd6d8e3a33e8d0/ripgrep-14.1.0-py3-none-win_amd64.whl", hash = "sha256:85f991f1c268c81d7b9df44a1bfd3224fc69072d83872ac71e2d8ed5186ef156", size = 1742280, upload-time = "2024-08-10T21:47:37.31Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 
384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" 
}, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = 
"2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, ] [[package]] @@ -1377,39 +2666,53 @@ wheels = [ [[package]] name = "ruff" -version = "0.11.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/53/ae4857030d59286924a8bdb30d213d6ff22d8f0957e738d0289990091dd8/ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d", size = 4186707, upload-time = "2025-05-22T19:19:34.363Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/14/f2326676197bab099e2a24473158c21656fbf6a207c65f596ae15acb32b9/ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092", size = 10229049, upload-time = "2025-05-22T19:18:45.516Z" }, - { url = "https://files.pythonhosted.org/packages/9a/f3/bff7c92dd66c959e711688b2e0768e486bbca46b2f35ac319bb6cce04447/ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4", size = 11053601, upload-time = "2025-05-22T19:18:49.269Z" }, - { url = "https://files.pythonhosted.org/packages/e2/38/8e1a3efd0ef9d8259346f986b77de0f62c7a5ff4a76563b6b39b68f793b9/ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd", size = 10367421, upload-time = "2025-05-22T19:18:51.754Z" }, - { url = "https://files.pythonhosted.org/packages/b4/50/557ad9dd4fb9d0bf524ec83a090a3932d284d1a8b48b5906b13b72800e5f/ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6", size = 10581980, upload-time = "2025-05-22T19:18:54.011Z" }, - { url = "https://files.pythonhosted.org/packages/c4/b2/e2ed82d6e2739ece94f1bdbbd1d81b712d3cdaf69f0a1d1f1a116b33f9ad/ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4", size = 10089241, upload-time = "2025-05-22T19:18:56.041Z" }, - { url = "https://files.pythonhosted.org/packages/3d/9f/b4539f037a5302c450d7c695c82f80e98e48d0d667ecc250e6bdeb49b5c3/ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac", size = 11699398, upload-time = "2025-05-22T19:18:58.248Z" }, - { url = "https://files.pythonhosted.org/packages/61/fb/32e029d2c0b17df65e6eaa5ce7aea5fbeaed22dddd9fcfbbf5fe37c6e44e/ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709", size = 12427955, upload-time = "2025-05-22T19:19:00.981Z" }, - { url = "https://files.pythonhosted.org/packages/6e/e3/160488dbb11f18c8121cfd588e38095ba779ae208292765972f7732bfd95/ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8", size = 12069803, upload-time = "2025-05-22T19:19:03.258Z" }, - { url = "https://files.pythonhosted.org/packages/ff/16/3b006a875f84b3d0bff24bef26b8b3591454903f6f754b3f0a318589dcc3/ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b", size = 11242630, upload-time = "2025-05-22T19:19:05.871Z" }, - { url = "https://files.pythonhosted.org/packages/65/0d/0338bb8ac0b97175c2d533e9c8cdc127166de7eb16d028a43c5ab9e75abd/ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875", size = 11507310, upload-time = "2025-05-22T19:19:08.584Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bf/d7130eb26174ce9b02348b9f86d5874eafbf9f68e5152e15e8e0a392e4a3/ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1", size = 10441144, upload-time = "2025-05-22T19:19:13.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/f3/4be2453b258c092ff7b1761987cf0749e70ca1340cd1bfb4def08a70e8d8/ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81", size = 10081987, upload-time = "2025-05-22T19:19:15.821Z" }, - { url = "https://files.pythonhosted.org/packages/6c/6e/dfa4d2030c5b5c13db158219f2ec67bf333e8a7748dccf34cfa2a6ab9ebc/ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639", size = 11073922, upload-time = "2025-05-22T19:19:18.104Z" }, - { url = "https://files.pythonhosted.org/packages/ff/f4/f7b0b0c3d32b593a20ed8010fa2c1a01f2ce91e79dda6119fcc51d26c67b/ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345", size = 11568537, upload-time = "2025-05-22T19:19:20.889Z" }, - { url = "https://files.pythonhosted.org/packages/d2/46/0e892064d0adc18bcc81deed9aaa9942a27fd2cd9b1b7791111ce468c25f/ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112", size = 10536492, upload-time = "2025-05-22T19:19:23.642Z" }, - { url = "https://files.pythonhosted.org/packages/1b/d9/232e79459850b9f327e9f1dc9c047a2a38a6f9689e1ec30024841fc4416c/ruff-0.11.11-py3-none-win_amd64.whl", hash = 
"sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f", size = 11612562, upload-time = "2025-05-22T19:19:27.013Z" }, - { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951, upload-time = "2025-05-22T19:19:30.043Z" }, +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/33/c8e89216845615d14d2d42ba2bee404e7206a8db782f33400754f3799f05/ruff-0.13.1.tar.gz", hash = "sha256:88074c3849087f153d4bb22e92243ad4c1b366d7055f98726bc19aa08dc12d51", size = 5397987, upload-time = "2025-09-18T19:52:44.33Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/41/ca37e340938f45cfb8557a97a5c347e718ef34702546b174e5300dbb1f28/ruff-0.13.1-py3-none-linux_armv6l.whl", hash = "sha256:b2abff595cc3cbfa55e509d89439b5a09a6ee3c252d92020bd2de240836cf45b", size = 12304308, upload-time = "2025-09-18T19:51:56.253Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/ba378ef4129415066c3e1c80d84e539a0d52feb250685091f874804f28af/ruff-0.13.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4ee9f4249bf7f8bb3984c41bfaf6a658162cdb1b22e3103eabc7dd1dc5579334", size = 12937258, upload-time = "2025-09-18T19:52:00.184Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b6/ec5e4559ae0ad955515c176910d6d7c93edcbc0ed1a3195a41179c58431d/ruff-0.13.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c5da4af5f6418c07d75e6f3224e08147441f5d1eac2e6ce10dcce5e616a3bae", size = 12214554, upload-time = "2025-09-18T19:52:02.753Z" }, + { url = "https://files.pythonhosted.org/packages/70/d6/cb3e3b4f03b9b0c4d4d8f06126d34b3394f6b4d764912fe80a1300696ef6/ruff-0.13.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80524f84a01355a59a93cef98d804e2137639823bcee2931f5028e71134a954e", size = 12448181, upload-time = "2025-09-18T19:52:05.279Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ea/bf60cb46d7ade706a246cd3fb99e4cfe854efa3dfbe530d049c684da24ff/ruff-0.13.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff7f5ce8d7988767dd46a148192a14d0f48d1baea733f055d9064875c7d50389", size = 12104599, upload-time = "2025-09-18T19:52:07.497Z" }, + { url = "https://files.pythonhosted.org/packages/2d/3e/05f72f4c3d3a69e65d55a13e1dd1ade76c106d8546e7e54501d31f1dc54a/ruff-0.13.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c55d84715061f8b05469cdc9a446aa6c7294cd4bd55e86a89e572dba14374f8c", size = 13791178, upload-time = "2025-09-18T19:52:10.189Z" }, + { url = "https://files.pythonhosted.org/packages/81/e7/01b1fc403dd45d6cfe600725270ecc6a8f8a48a55bc6521ad820ed3ceaf8/ruff-0.13.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ac57fed932d90fa1624c946dc67a0a3388d65a7edc7d2d8e4ca7bddaa789b3b0", size = 14814474, upload-time = "2025-09-18T19:52:12.866Z" }, + { url = "https://files.pythonhosted.org/packages/fa/92/d9e183d4ed6185a8df2ce9faa3f22e80e95b5f88d9cc3d86a6d94331da3f/ruff-0.13.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c366a71d5b4f41f86a008694f7a0d75fe409ec298685ff72dc882f882d532e36", size = 14217531, upload-time = "2025-09-18T19:52:15.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/4a/6ddb1b11d60888be224d721e01bdd2d81faaf1720592858ab8bac3600466/ruff-0.13.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ea9d1b5ad3e7a83ee8ebb1229c33e5fe771e833d6d3dcfca7b77d95b060d38", size = 13265267, upload-time = "2025-09-18T19:52:17.649Z" }, + { url = "https://files.pythonhosted.org/packages/81/98/3f1d18a8d9ea33ef2ad508f0417fcb182c99b23258ec5e53d15db8289809/ruff-0.13.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f70202996055b555d3d74b626406476cc692f37b13bac8828acff058c9966a", size = 13243120, upload-time = "2025-09-18T19:52:20.332Z" }, + { url = "https://files.pythonhosted.org/packages/8d/86/b6ce62ce9c12765fa6c65078d1938d2490b2b1d9273d0de384952b43c490/ruff-0.13.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f8cff7a105dad631085d9505b491db33848007d6b487c3c1979dd8d9b2963783", size = 13443084, upload-time = "2025-09-18T19:52:23.032Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6e/af7943466a41338d04503fb5a81b2fd07251bd272f546622e5b1599a7976/ruff-0.13.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9761e84255443316a258dd7dfbd9bfb59c756e52237ed42494917b2577697c6a", size = 12295105, upload-time = "2025-09-18T19:52:25.263Z" }, + { url = "https://files.pythonhosted.org/packages/3f/97/0249b9a24f0f3ebd12f007e81c87cec6d311de566885e9309fcbac5b24cc/ruff-0.13.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3d376a88c3102ef228b102211ef4a6d13df330cb0f5ca56fdac04ccec2a99700", size = 12072284, upload-time = "2025-09-18T19:52:27.478Z" }, + { url = "https://files.pythonhosted.org/packages/f6/85/0b64693b2c99d62ae65236ef74508ba39c3febd01466ef7f354885e5050c/ruff-0.13.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cbefd60082b517a82c6ec8836989775ac05f8991715d228b3c1d86ccc7df7dae", size = 12970314, upload-time = "2025-09-18T19:52:30.212Z" }, + { url = "https://files.pythonhosted.org/packages/96/fc/342e9f28179915d28b3747b7654f932ca472afbf7090fc0c4011e802f494/ruff-0.13.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd16b9a5a499fe73f3c2ef09a7885cb1d97058614d601809d37c422ed1525317", size = 13422360, upload-time = "2025-09-18T19:52:32.676Z" }, + { url = "https://files.pythonhosted.org/packages/37/54/6177a0dc10bce6f43e392a2192e6018755473283d0cf43cc7e6afc182aea/ruff-0.13.1-py3-none-win32.whl", hash = "sha256:55e9efa692d7cb18580279f1fbb525146adc401f40735edf0aaeabd93099f9a0", size = 12178448, upload-time = "2025-09-18T19:52:35.545Z" }, + { url = "https://files.pythonhosted.org/packages/64/51/c6a3a33d9938007b8bdc8ca852ecc8d810a407fb513ab08e34af12dc7c24/ruff-0.13.1-py3-none-win_amd64.whl", hash = "sha256:3a3fb595287ee556de947183489f636b9f76a72f0fa9c028bdcabf5bab2cc5e5", size = 13286458, upload-time = "2025-09-18T19:52:38.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/04/afc078a12cf68592345b1e2d6ecdff837d286bac023d7a22c54c7a698c5b/ruff-0.13.1-py3-none-win_arm64.whl", hash = "sha256:c0bae9ffd92d54e03c2bf266f466da0a65e145f298ee5b5846ed435f6a00518a", size = 12437893, upload-time = "2025-09-18T19:52:41.283Z" }, ] [[package]] name = "s3transfer" -version = "0.13.0" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/5d/9dcc100abc6711e8247af5aa561fc07c4a046f72f659c3adea9a449e191a/s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177", size = 150232, upload-time = "2025-05-22T19:24:50.245Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/62/74/8d69dcb7a9efe8baa2046891735e5dfe433ad558ae23d9e3c14c633d1d58/s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125", size = 151547, upload-time = "2025-09-09T19:23:31.089Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/17/22bf8155aa0ea2305eefa3a6402e040df7ebe512d1310165eda1e233c3f8/s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be", size = 85152, upload-time = "2025-05-22T19:24:48.703Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/ae7ca09223a81a1d890b2557186ea015f6e0502e9b8cb8e1813f1d8cfa4e/s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456", size = 85712, upload-time = "2025-09-09T19:23:30.041Z" }, +] + +[[package]] +name = "screeninfo" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cython", marker = "sys_platform == 'darwin'" }, + { name = "pyobjc-framework-cocoa", marker = "sys_platform == 'darwin'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ec/bb/e69e5e628d43f118e0af4fc063c20058faa8635c95a1296764acc8167e27/screeninfo-0.8.1.tar.gz", hash = "sha256:9983076bcc7e34402a1a9e4d7dabf3729411fd2abb3f3b4be7eba73519cd2ed1", size = 10666, upload-time = "2022-09-09T11:35:23.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/bf/c5205d480307bef660e56544b9e3d7ff687da776abb30c9cb3f330887570/screeninfo-0.8.1-py3-none-any.whl", hash = "sha256:e97d6b173856edcfa3bd282f81deb528188aff14b11ec3e195584e7641be733c", size = 12907, upload-time = "2022-09-09T11:35:21.351Z" }, ] [[package]] @@ -1432,61 +2735,147 @@ wheels = [ [[package]] name = "soupsieve" -version = "2.7" +version = "2.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, ] [[package]] name = "sse-starlette" -version = "2.3.5" +version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, - { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/5f/28f45b1ff14bee871bacafd0a97213f7ec70e389939a80c60c0fb72a9fc9/sse_starlette-2.3.5.tar.gz", hash = "sha256:228357b6e42dcc73a427990e2b4a03c023e2495ecee82e14f07ba15077e334b2", size = 17511, upload-time = 
"2025-05-12T18:23:52.601Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/48/3e49cf0f64961656402c0023edbc51844fe17afe53ab50e958a6dbbbd499/sse_starlette-2.3.5-py3-none-any.whl", hash = "sha256:251708539a335570f10eaaa21d1848a10c42ee6dc3a9cf37ef42266cdb1c52a8", size = 10233, upload-time = "2025-05-12T18:23:50.722Z" }, + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, ] [[package]] name = "starlette" -version = "0.46.2" +version = "0.48.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "temporalio" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nexus-rpc" }, + { name = "protobuf" }, + { name = "types-protobuf" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/a7/622047cb731a104e455687793d724ed143925e9ea14b522ad5ce224e8d7f/temporalio-1.17.0.tar.gz", hash = "sha256:1ac8f1ade36fafe7110b979b6a16d89203e1f4fb9c874f2fe3b5d83c17b13244", size = 1734067, upload-time = "2025-09-03T01:27:05.205Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/9a/f6fd68e60afc67c402c0676c12baba3aa04d522c74f4123ed31b544d4159/temporalio-1.17.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7a86948c74a872b7f5ecb51c5d7e8013fdda4d6a220fe92185629342e94393e7", size = 12905249, upload-time = "2025-09-03T01:26:51.93Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7e/54cffb6a0ef4853f51bcefe5a74508940bad72a4442e50b3d52379a941c3/temporalio-1.17.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:00b34a986012a355bdadf0e7eb9e57e176f2e0b1d69ea4be9eb73c21672e7fd0", size = 12539749, upload-time = "2025-09-03T01:26:54.854Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f3/e4c829eb31bdb5eb14411ce7765b4ad8087794231110ff6188497859f0e6/temporalio-1.17.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36a84e52727e287e13777d86fa0bbda11ba6523f75a616b811cc9d799b37b98c", size = 12969855, upload-time = "2025-09-03T01:26:57.464Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/fef412e10408e35888815ac06c0c777cff1faa76157d861878d23a17edf0/temporalio-1.17.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:617f37edce3db97cc7d2ff81c145a1b92c100f6e0e42207739271d10c2eea38e", size = 13165153, upload-time = "2025-09-03T01:27:00.285Z" }, + { url = "https://files.pythonhosted.org/packages/58/2d/01d164b78ea414f1e2554cd9959ffcf95f0c91a6d595f03128a70e433f57/temporalio-1.17.0-cp39-abi3-win_amd64.whl", hash = "sha256:f2724220fda1fd5948d917350ac25069c62624f46e53d4d6c6171baa75681145", size = 13178439, upload-time = "2025-09-03T01:27:02.855Z" }, +] + +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + +[[package]] +name = "termcolor" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/6c/3d75c196ac07ac8749600b60b03f4f6094d54e132c4d94ebac6ee0e0add0/termcolor-3.1.0.tar.gz", hash = "sha256:6a6dd7fbee581909eeec6a756cff1d7f7c376063b14e4a298dc4980309e55970", size = 14324, upload-time = "2025-04-30T11:37:53.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/bd/de8d508070629b6d84a30d01d57e4a65c69aa7f5abe7560b8fad3b50ea59/termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa", size = 7684, upload-time = "2025-04-30T11:37:52.382Z" }, +] + +[[package]] +name = "textual" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", extra = ["linkify", "plugins"] }, + { name = "platformdirs" }, + { name = "pygments" }, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/44/4b524b2f06e0fa6c4ede56a4e9af5edd5f3f83cf2eea5cb4fd0ce5bbe063/textual-6.1.0.tar.gz", hash = "sha256:cc89826ca2146c645563259320ca4ddc75d183c77afb7d58acdd46849df9144d", size = 1564786, upload-time = "2025-09-02T11:42:34.655Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/43/f91e041f239b54399310a99041faf33beae9a6e628671471d0fcd6276af4/textual-6.1.0-py3-none-any.whl", hash = "sha256:a3f5e6710404fcdc6385385db894699282dccf2ad50103cebc677403c1baadd5", size = 707840, upload-time = "2025-09-02T11:42:32.746Z" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846, upload-time = "2025-04-13T13:56:17.942Z" } + +[[package]] +name = "textual-dev" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "click" }, + { name = "msgpack" }, + { name = "textual" }, + { name = "textual-serve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/d3/ed0b20f6de0af1b7062c402d59d256029c0daa055ad9e04c27471b450cdd/textual_dev-1.7.0.tar.gz", hash = "sha256:bf1a50eaaff4cd6a863535dd53f06dbbd62617c371604f66f56de3908220ccd5", size = 
25935, upload-time = "2024-11-18T16:59:47.924Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037, upload-time = "2025-04-13T13:56:16.21Z" }, + { url = "https://files.pythonhosted.org/packages/50/4b/3c1eb9cbc39f2f28d27e10ef2fe42bfe0cf3c2f8445a454c124948d6169b/textual_dev-1.7.0-py3-none-any.whl", hash = "sha256:a93a846aeb6a06edb7808504d9c301565f7f4bf2e7046d56583ed755af356c8d", size = 27221, upload-time = "2024-11-18T16:59:46.833Z" }, +] + +[[package]] +name = "textual-serve" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "aiohttp-jinja2" }, + { name = "jinja2" }, + { name = "rich" }, + { name = "textual" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/41/09d5695b050d592ff58422be2ca5c9915787f59ff576ca91d9541d315406/textual_serve-1.1.2.tar.gz", hash = "sha256:0ccaf9b9df9c08d4b2d7a0887cad3272243ba87f68192c364f4bed5b683e4bd4", size = 892959, upload-time = "2025-04-16T12:11:41.746Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/fb/0006f86960ab8a2f69c9f496db657992000547f94f53a2f483fd611b4bd2/textual_serve-1.1.2-py3-none-any.whl", hash = "sha256:147d56b165dccf2f387203fe58d43ce98ccad34003fe3d38e6d2bc8903861865", size = 447326, upload-time = "2025-04-16T12:11:43.176Z" }, ] [[package]] name = "tokenizers" -version = "0.21.1" +version = "0.22.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/92/76/5ac0c97f1117b91b7eb7323dcd61af80d72f790b4df71249a7850c195f30/tokenizers-0.21.1.tar.gz", hash = "sha256:a1bb04dc5b448985f86ecd4b05407f5a8d97cb2c0532199b2a302a604a0165ab", size = 343256, upload-time = "2025-03-13T10:51:18.189Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/1f/328aee25f9115bf04262e8b4e5a2050b7b7cf44b59c74e982db7270c7f30/tokenizers-0.21.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e78e413e9e668ad790a29456e677d9d3aa50a9ad311a40905d6861ba7692cf41", size = 2780767, upload-time = "2025-03-13T10:51:09.459Z" }, - { url = "https://files.pythonhosted.org/packages/ae/1a/4526797f3719b0287853f12c5ad563a9be09d446c44ac784cdd7c50f76ab/tokenizers-0.21.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:cd51cd0a91ecc801633829fcd1fda9cf8682ed3477c6243b9a095539de4aecf3", size = 2650555, upload-time = "2025-03-13T10:51:07.692Z" }, - { url = "https://files.pythonhosted.org/packages/4d/7a/a209b29f971a9fdc1da86f917fe4524564924db50d13f0724feed37b2a4d/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28da6b72d4fb14ee200a1bd386ff74ade8992d7f725f2bde2c495a9a98cf4d9f", size = 2937541, upload-time = "2025-03-13T10:50:56.679Z" }, - { url = "https://files.pythonhosted.org/packages/3c/1e/b788b50ffc6191e0b1fc2b0d49df8cff16fe415302e5ceb89f619d12c5bc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34d8cfde551c9916cb92014e040806122295a6800914bab5865deb85623931cf", size = 2819058, upload-time = 
"2025-03-13T10:50:59.525Z" }, - { url = "https://files.pythonhosted.org/packages/36/aa/3626dfa09a0ecc5b57a8c58eeaeb7dd7ca9a37ad9dd681edab5acd55764c/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaa852d23e125b73d283c98f007e06d4595732104b65402f46e8ef24b588d9f8", size = 3133278, upload-time = "2025-03-13T10:51:04.678Z" }, - { url = "https://files.pythonhosted.org/packages/a4/4d/8fbc203838b3d26269f944a89459d94c858f5b3f9a9b6ee9728cdcf69161/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a21a15d5c8e603331b8a59548bbe113564136dc0f5ad8306dd5033459a226da0", size = 3144253, upload-time = "2025-03-13T10:51:01.261Z" }, - { url = "https://files.pythonhosted.org/packages/d8/1b/2bd062adeb7c7511b847b32e356024980c0ffcf35f28947792c2d8ad2288/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2fdbd4c067c60a0ac7eca14b6bd18a5bebace54eb757c706b47ea93204f7a37c", size = 3398225, upload-time = "2025-03-13T10:51:03.243Z" }, - { url = "https://files.pythonhosted.org/packages/8a/63/38be071b0c8e06840bc6046991636bcb30c27f6bb1e670f4f4bc87cf49cc/tokenizers-0.21.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd9a0061e403546f7377df940e866c3e678d7d4e9643d0461ea442b4f89e61a", size = 3038874, upload-time = "2025-03-13T10:51:06.235Z" }, - { url = "https://files.pythonhosted.org/packages/ec/83/afa94193c09246417c23a3c75a8a0a96bf44ab5630a3015538d0c316dd4b/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:db9484aeb2e200c43b915a1a0150ea885e35f357a5a8fabf7373af333dcc8dbf", size = 9014448, upload-time = "2025-03-13T10:51:10.927Z" }, - { url = "https://files.pythonhosted.org/packages/ae/b3/0e1a37d4f84c0f014d43701c11eb8072704f6efe8d8fc2dcdb79c47d76de/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed248ab5279e601a30a4d67bdb897ecbe955a50f1e7bb62bd99f07dd11c2f5b6", size = 8937877, upload-time = "2025-03-13T10:51:12.688Z" }, - { url = "https://files.pythonhosted.org/packages/ac/33/ff08f50e6d615eb180a4a328c65907feb6ded0b8f990ec923969759dc379/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9ac78b12e541d4ce67b4dfd970e44c060a2147b9b2a21f509566d556a509c67d", size = 9186645, upload-time = "2025-03-13T10:51:14.723Z" }, - { url = "https://files.pythonhosted.org/packages/5f/aa/8ae85f69a9f6012c6f8011c6f4aa1c96154c816e9eea2e1b758601157833/tokenizers-0.21.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e5a69c1a4496b81a5ee5d2c1f3f7fbdf95e90a0196101b0ee89ed9956b8a168f", size = 9384380, upload-time = "2025-03-13T10:51:16.526Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5b/a5d98c89f747455e8b7a9504910c865d5e51da55e825a7ae641fb5ff0a58/tokenizers-0.21.1-cp39-abi3-win32.whl", hash = "sha256:1039a3a5734944e09de1d48761ade94e00d0fa760c0e0551151d4dd851ba63e3", size = 2239506, upload-time = "2025-03-13T10:51:20.643Z" }, - { url = "https://files.pythonhosted.org/packages/e6/b6/072a8e053ae600dcc2ac0da81a23548e3b523301a442a6ca900e92ac35be/tokenizers-0.21.1-cp39-abi3-win_amd64.whl", hash = "sha256:0f0dcbcc9f6e13e675a66d7a5f2f225a736745ce484c1a4e07476a89ccdad382", size = 2435481, upload-time = "2025-03-13T10:51:19.243Z" }, + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { 
url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time 
= "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, ] [[package]] @@ -1540,25 +2929,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] +[[package]] +name = "types-protobuf" +version = "6.32.1.20250918" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/5a/bd06c2dbb77ebd4ea764473c9c4c014c7ba94432192cb965a274f8544b9d/types_protobuf-6.32.1.20250918.tar.gz", hash = "sha256:44ce0ae98475909ca72379946ab61a4435eec2a41090821e713c17e8faf5b88f", size = 63780, upload-time = "2025-09-18T02:50:39.391Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/5a/8d93d4f4af5dc3dd62aa4f020deae746b34b1d94fb5bee1f776c6b7e9d6c/types_protobuf-6.32.1.20250918-py3-none-any.whl", hash = "sha256:22ba6133d142d11cc34d3788ad6dead2732368ebb0406eaa7790ea6ae46c8d0b", size = 77885, upload-time = "2025-09-18T02:50:38.028Z" }, +] + [[package]] name = "types-requests" -version = "2.32.0.20250515" +version = "2.32.4.20250913" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/c1/cdc4f9b8cfd9130fbe6276db574f114541f4231fcc6fb29648289e6e3390/types_requests-2.32.0.20250515.tar.gz", hash = "sha256:09c8b63c11318cb2460813871aaa48b671002e59fda67ca909e9883777787581", size = 23012, upload-time = "2025-05-15T03:04:31.817Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/0f/68a997c73a129287785f418c1ebb6004f81e46b53b3caba88c0e03fcd04a/types_requests-2.32.0.20250515-py3-none-any.whl", hash = "sha256:f8eba93b3a892beee32643ff836993f15a785816acca21ea0ffa006f05ef0fb2", size = 20635, upload-time = "2025-05-15T03:04:30.5Z" }, + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, ] [[package]] name = "typing-extensions" -version = "4.13.2" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = 
"2025-04-10T14:19:05.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] @@ -1573,36 +2971,64 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, ] +[[package]] +name = "ua-parser" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ua-parser-builtins" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/0e/ed98be735bc89d5040e0c60f5620d0b8c04e9e7da99ed1459e8050e90a77/ua_parser-1.0.1.tar.gz", hash = "sha256:f9d92bf19d4329019cef91707aecc23c6d65143ad7e29a233f0580fb0d15547d", size = 728106, upload-time = "2025-02-01T14:13:32.508Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/37/be6dfbfa45719aa82c008fb4772cfe5c46db765a2ca4b6f524a1fdfee4d7/ua_parser-1.0.1-py3-none-any.whl", hash = "sha256:b059f2cb0935addea7e551251cbbf42e9a8872f86134163bc1a4f79e0945ffea", size = 31410, upload-time = "2025-02-01T14:13:28.458Z" }, +] + +[[package]] +name = "ua-parser-builtins" +version = "0.18.0.post1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/d3/13adff37f15489c784cc7669c35a6c3bf94b87540229eedf52ef2a1d0175/ua_parser_builtins-0.18.0.post1-py3-none-any.whl", hash = "sha256:eb4f93504040c3a990a6b0742a2afd540d87d7f9f05fd66e94c101db1564674d", size = 86077, upload-time = "2024-12-05T18:44:36.732Z" }, +] + +[[package]] +name = "uc-micro-py" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043, upload-time = "2024-02-09T16:52:01.654Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" }, +] + [[package]] name = "urllib3" -version = "2.4.0" +version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size 
= 390672, upload-time = "2025-04-10T15:23:39.232Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]] name = "uvicorn" -version = "0.34.2" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815, upload-time = "2025-04-19T06:02:50.101Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483, upload-time = "2025-04-19T06:02:48.42Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = 
"sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -1611,17 +3037,6 @@ version = "15.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/da/6462a9f510c0c49837bbc9345aca92d767a56c1fb2939e1579df1e1cdcf7/websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b", size = 175423, upload-time = "2025-03-05T20:01:35.363Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9f/9d11c1a4eb046a9e106483b9ff69bce7ac880443f00e5ce64261b47b07e7/websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205", size = 173080, upload-time = "2025-03-05T20:01:37.304Z" }, - { url = "https://files.pythonhosted.org/packages/d5/4f/b462242432d93ea45f297b6179c7333dd0402b855a912a04e7fc61c0d71f/websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a", size = 173329, upload-time = "2025-03-05T20:01:39.668Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0c/6afa1f4644d7ed50284ac59cc70ef8abd44ccf7d45850d989ea7310538d0/websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e", size = 182312, upload-time = "2025-03-05T20:01:41.815Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d4/ffc8bd1350b229ca7a4db2a3e1c482cf87cea1baccd0ef3e72bc720caeec/websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf", size = 181319, upload-time = "2025-03-05T20:01:43.967Z" }, - { url = "https://files.pythonhosted.org/packages/97/3a/5323a6bb94917af13bbb34009fac01e55c51dfde354f63692bf2533ffbc2/websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb", size = 181631, upload-time = "2025-03-05T20:01:46.104Z" }, - { url = "https://files.pythonhosted.org/packages/a6/cc/1aeb0f7cee59ef065724041bb7ed667b6ab1eeffe5141696cccec2687b66/websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d", size = 182016, upload-time = "2025-03-05T20:01:47.603Z" }, - { url = "https://files.pythonhosted.org/packages/79/f9/c86f8f7af208e4161a7f7e02774e9d0a81c632ae76db2ff22549e1718a51/websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9", size = 181426, upload-time = "2025-03-05T20:01:48.949Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b9/828b0bc6753db905b91df6ae477c0b14a141090df64fb17f8a9d7e3516cf/websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c", size = 181360, upload-time = "2025-03-05T20:01:50.938Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/fb/250f5533ec468ba6327055b7d98b9df056fb1ce623b8b6aaafb30b55d02e/websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256", size = 176388, upload-time = "2025-03-05T20:01:52.213Z" }, - { url = "https://files.pythonhosted.org/packages/1c/46/aca7082012768bb98e5608f01658ff3ac8437e563eca41cf068bd5849a5e/websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41", size = 176830, upload-time = "2025-03-05T20:01:53.922Z" }, { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, @@ -1655,84 +3070,155 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/d40f779fa16f74d3468357197af8d6ad07e7c5a27ea1ca74ceb38986f77a/websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3", size = 173109, upload-time = "2025-03-05T20:03:17.769Z" }, - { url = "https://files.pythonhosted.org/packages/bc/cd/5b887b8585a593073fd92f7c23ecd3985cd2c3175025a91b0d69b0551372/websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1", size = 173343, upload-time = "2025-03-05T20:03:19.094Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ae/d34f7556890341e900a95acf4886833646306269f899d58ad62f588bf410/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475", size = 174599, upload-time = "2025-03-05T20:03:21.1Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/e6/5fd43993a87db364ec60fc1d608273a1a465c0caba69176dd160e197ce42/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9", size = 174207, upload-time = "2025-03-05T20:03:23.221Z" }, - { url = "https://files.pythonhosted.org/packages/2b/fb/c492d6daa5ec067c2988ac80c61359ace5c4c674c532985ac5a123436cec/websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04", size = 174155, upload-time = "2025-03-05T20:03:25.321Z" }, - { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] [[package]] name = "wrapt" -version = "1.17.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531, upload-time = "2025-01-14T10:35:45.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/d1/1daec934997e8b160040c78d7b31789f19b122110a75eca3d4e8da0049e1/wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984", size = 53307, upload-time = "2025-01-14T10:33:13.616Z" }, - { url = "https://files.pythonhosted.org/packages/1b/7b/13369d42651b809389c1a7153baa01d9700430576c81a2f5c5e460df0ed9/wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22", size = 38486, upload-time = "2025-01-14T10:33:15.947Z" }, - { url = "https://files.pythonhosted.org/packages/62/bf/e0105016f907c30b4bd9e377867c48c34dc9c6c0c104556c9c9126bd89ed/wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7", size = 38777, upload-time = "2025-01-14T10:33:17.462Z" }, - { url = "https://files.pythonhosted.org/packages/27/70/0f6e0679845cbf8b165e027d43402a55494779295c4b08414097b258ac87/wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c", size = 83314, upload-time = "2025-01-14T10:33:21.282Z" }, - { url = "https://files.pythonhosted.org/packages/0f/77/0576d841bf84af8579124a93d216f55d6f74374e4445264cb378a6ed33eb/wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72", size = 74947, upload-time = "2025-01-14T10:33:24.414Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/ec/00759565518f268ed707dcc40f7eeec38637d46b098a1f5143bff488fe97/wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061", size = 82778, upload-time = "2025-01-14T10:33:26.152Z" }, - { url = "https://files.pythonhosted.org/packages/f8/5a/7cffd26b1c607b0b0c8a9ca9d75757ad7620c9c0a9b4a25d3f8a1480fafc/wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2", size = 81716, upload-time = "2025-01-14T10:33:27.372Z" }, - { url = "https://files.pythonhosted.org/packages/7e/09/dccf68fa98e862df7e6a60a61d43d644b7d095a5fc36dbb591bbd4a1c7b2/wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c", size = 74548, upload-time = "2025-01-14T10:33:28.52Z" }, - { url = "https://files.pythonhosted.org/packages/b7/8e/067021fa3c8814952c5e228d916963c1115b983e21393289de15128e867e/wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62", size = 81334, upload-time = "2025-01-14T10:33:29.643Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0d/9d4b5219ae4393f718699ca1c05f5ebc0c40d076f7e65fd48f5f693294fb/wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563", size = 36427, upload-time = "2025-01-14T10:33:30.832Z" }, - { url = "https://files.pythonhosted.org/packages/72/6a/c5a83e8f61aec1e1aeef939807602fb880e5872371e95df2137142f5c58e/wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f", size = 38774, upload-time = "2025-01-14T10:33:32.897Z" }, - { url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308, upload-time = "2025-01-14T10:33:33.992Z" }, - { url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488, upload-time = "2025-01-14T10:33:35.264Z" }, - { url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776, upload-time = "2025-01-14T10:33:38.28Z" }, - { url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776, upload-time = "2025-01-14T10:33:40.678Z" }, - { url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420, upload-time = "2025-01-14T10:33:41.868Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199, upload-time = "2025-01-14T10:33:43.598Z" }, - { url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307, upload-time = "2025-01-14T10:33:48.499Z" }, - { url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025, upload-time = "2025-01-14T10:33:51.191Z" }, - { url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879, upload-time = "2025-01-14T10:33:52.328Z" }, - { url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419, upload-time = "2025-01-14T10:33:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773, upload-time = "2025-01-14T10:33:56.323Z" }, - { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799, upload-time = "2025-01-14T10:33:57.4Z" }, - { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821, upload-time = "2025-01-14T10:33:59.334Z" }, - { url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919, upload-time = "2025-01-14T10:34:04.093Z" }, - { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721, upload-time = "2025-01-14T10:34:07.163Z" }, - { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899, upload-time = "2025-01-14T10:34:09.82Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222, upload-time = "2025-01-14T10:34:11.258Z" }, - { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707, upload-time = "2025-01-14T10:34:12.49Z" }, - { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685, upload-time = "2025-01-14T10:34:15.043Z" }, - { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567, upload-time = "2025-01-14T10:34:16.563Z" }, - { url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672, upload-time = "2025-01-14T10:34:17.727Z" }, - { url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865, upload-time = "2025-01-14T10:34:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b9/0ffd557a92f3b11d4c5d5e0c5e4ad057bd9eb8586615cdaf901409920b14/wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125", size = 53800, upload-time = "2025-01-14T10:34:21.571Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ef/8be90a0b7e73c32e550c73cfb2fa09db62234227ece47b0e80a05073b375/wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998", size = 38824, upload-time = "2025-01-14T10:34:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/36/89/0aae34c10fe524cce30fe5fc433210376bce94cf74d05b0d68344c8ba46e/wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5", size = 38920, upload-time = "2025-01-14T10:34:25.386Z" }, - { url = "https://files.pythonhosted.org/packages/3b/24/11c4510de906d77e0cfb5197f1b1445d4fec42c9a39ea853d482698ac681/wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8", size = 88690, upload-time = "2025-01-14T10:34:28.058Z" }, - { url = "https://files.pythonhosted.org/packages/71/d7/cfcf842291267bf455b3e266c0c29dcb675b5540ee8b50ba1699abf3af45/wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6", size = 80861, upload-time = "2025-01-14T10:34:29.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/66/5d973e9f3e7370fd686fb47a9af3319418ed925c27d72ce16b791231576d/wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc", size = 89174, upload-time = "2025-01-14T10:34:31.702Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d3/8e17bb70f6ae25dabc1aaf990f86824e4fd98ee9cadf197054e068500d27/wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2", size = 86721, upload-time = "2025-01-14T10:34:32.91Z" }, - { url = "https://files.pythonhosted.org/packages/6f/54/f170dfb278fe1c30d0ff864513cff526d624ab8de3254b20abb9cffedc24/wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b", size = 79763, upload-time = "2025-01-14T10:34:34.903Z" }, - { url = "https://files.pythonhosted.org/packages/4a/98/de07243751f1c4a9b15c76019250210dd3486ce098c3d80d5f729cba029c/wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504", size = 87585, upload-time = "2025-01-14T10:34:36.13Z" }, - { url = "https://files.pythonhosted.org/packages/f9/f0/13925f4bd6548013038cdeb11ee2cbd4e37c30f8bfd5db9e5a2a370d6e20/wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a", size = 36676, upload-time = "2025-01-14T10:34:37.962Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ae/743f16ef8c2e3628df3ddfd652b7d4c555d12c84b53f3d8218498f4ade9b/wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845", size = 38871, upload-time = "2025-01-14T10:34:39.13Z" }, - { url = "https://files.pythonhosted.org/packages/3d/bc/30f903f891a82d402ffb5fda27ec1d621cc97cb74c16fea0b6141f1d4e87/wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192", size = 56312, upload-time = "2025-01-14T10:34:40.604Z" }, - { url = "https://files.pythonhosted.org/packages/8a/04/c97273eb491b5f1c918857cd26f314b74fc9b29224521f5b83f872253725/wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b", size = 40062, upload-time = "2025-01-14T10:34:45.011Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ca/3b7afa1eae3a9e7fefe499db9b96813f41828b9fdb016ee836c4c379dadb/wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0", size = 40155, upload-time = "2025-01-14T10:34:47.25Z" }, - { url = "https://files.pythonhosted.org/packages/89/be/7c1baed43290775cb9030c774bc53c860db140397047cc49aedaf0a15477/wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306", size = 113471, upload-time = "2025-01-14T10:34:50.934Z" }, - { url = "https://files.pythonhosted.org/packages/32/98/4ed894cf012b6d6aae5f5cc974006bdeb92f0241775addad3f8cd6ab71c8/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb", size = 101208, upload-time = "2025-01-14T10:34:52.297Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/fd/0c30f2301ca94e655e5e057012e83284ce8c545df7661a78d8bfca2fac7a/wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681", size = 109339, upload-time = "2025-01-14T10:34:53.489Z" }, - { url = "https://files.pythonhosted.org/packages/75/56/05d000de894c4cfcb84bcd6b1df6214297b8089a7bd324c21a4765e49b14/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6", size = 110232, upload-time = "2025-01-14T10:34:55.327Z" }, - { url = "https://files.pythonhosted.org/packages/53/f8/c3f6b2cf9b9277fb0813418e1503e68414cd036b3b099c823379c9575e6d/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6", size = 100476, upload-time = "2025-01-14T10:34:58.055Z" }, - { url = "https://files.pythonhosted.org/packages/a7/b1/0bb11e29aa5139d90b770ebbfa167267b1fc548d2302c30c8f7572851738/wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f", size = 106377, upload-time = "2025-01-14T10:34:59.3Z" }, - { url = "https://files.pythonhosted.org/packages/6a/e1/0122853035b40b3f333bbb25f1939fc1045e21dd518f7f0922b60c156f7c/wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555", size = 37986, upload-time = "2025-01-14T10:35:00.498Z" }, - { url = "https://files.pythonhosted.org/packages/09/5e/1655cf481e079c1f22d0cabdd4e51733679932718dc23bf2db175f329b76/wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c", size = 40750, upload-time = "2025-01-14T10:35:03.378Z" }, - { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594, upload-time = "2025-01-14T10:35:44.018Z" }, +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size 
= 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = 
"2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = "https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = 
"2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, ] [[package]] name = "zipp" -version = "3.22.0" +version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/b6/7b3d16792fdf94f146bed92be90b4eb4563569eca91513c8609aebf0c167/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5", size = 25257, upload-time = "2025-05-26T14:46:32.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/da/f64669af4cae46f17b90798a827519ce3737d31dbafad65d391e49643dc4/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343", size = 9796, upload-time = "2025-05-26T14:46:30.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ]