From 55f623a0065a933fc2f926c8b33498cddf6f58e2 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Mon, 9 Mar 2026 16:36:26 +1100 Subject: [PATCH 01/17] Add TTT-Discover autoresearch integration --- .gitignore | 3 +- .python-version | 2 +- README.md | 55 +- configs/ttt_discover_autoresearch.yaml | 23 + pyproject.toml | 3 +- run_ttt_discover.py | 5 + tests/fixtures/fake_train.py | 36 + tests/test_cli_integration.py | 124 ++ tests/test_env_smoke.py | 55 + tests/test_reward.py | 81 + tests/test_runner.py | 85 + ttt_autoresearch/__init__.py | 17 + ttt_autoresearch/cli.py | 114 ++ ttt_autoresearch/config.py | 211 +++ ttt_autoresearch/discover_compat.py | 88 ++ ttt_autoresearch/env.py | 294 ++++ ttt_autoresearch/reward.py | 144 ++ ttt_autoresearch/runner.py | 288 ++++ uv.lock | 1998 ++++++++++++++++++------ 19 files changed, 3141 insertions(+), 485 deletions(-) create mode 100644 configs/ttt_discover_autoresearch.yaml create mode 100644 run_ttt_discover.py create mode 100644 tests/fixtures/fake_train.py create mode 100644 tests/test_cli_integration.py create mode 100644 tests/test_env_smoke.py create mode 100644 tests/test_reward.py create mode 100644 tests/test_runner.py create mode 100644 ttt_autoresearch/__init__.py create mode 100644 ttt_autoresearch/cli.py create mode 100644 ttt_autoresearch/config.py create mode 100644 ttt_autoresearch/discover_compat.py create mode 100644 ttt_autoresearch/env.py create mode 100644 ttt_autoresearch/reward.py create mode 100644 ttt_autoresearch/runner.py diff --git a/.gitignore b/.gitignore index 99c30f52..ffe156a4 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ AGENTS.md # Experimental code/artifacts dev/ - # Results file results.tsv +runs/ +.pytest_cache/ diff --git a/.python-version b/.python-version index c8cfe395..2c073331 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.10 +3.11 diff --git a/README.md b/README.md index 8459259a..f4720dc6 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ By design, 
training runs for a **fixed 5-minute time budget** (wall clock, exclu ## Quick start -**Requirements:** A single NVIDIA GPU (tested on H100), Python 3.10+, [uv](https://docs.astral.sh/uv/). +**Requirements:** A single NVIDIA GPU (tested on H100), Python 3.11+, [uv](https://docs.astral.sh/uv/). ```bash @@ -47,6 +47,57 @@ Hi have a look at program.md and let's kick off a new experiment! let's do the s The `program.md` file is essentially a super lightweight "skill". +## TTT-Discover mode + +This repo also includes a thin adapter that uses [TTT-Discover](https://github.com/test-time-training/discover) as the outer RL engine for `autoresearch`. + +Because upstream `ttt-discover` depends on Python 3.11+, the integrated repo now targets Python 3.11+ for both the original and TTT workflows. + +- The outer model proposes full replacements for `train.py`. +- Each candidate `train.py` is executed in an isolated workspace. +- The inner run's `val_bpb` becomes the reward signal for the outer model. +- The implementation keeps the `discover` RL recipe intact: online LoRA updates, grouped rollouts, KL control, and state reuse through the upstream sampler. + +### Quickstart + +```bash +# 1. Install dependencies, including the pinned ttt-discover dependency +uv sync + +# 2. Prepare data and tokenizer once +uv run prepare.py + +# 3. Launch TTT-Discover outer-loop RL +uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch.yaml +``` + +The default outer model target is `Qwen/Qwen3.5-35B-A3B`. 
To swap models, edit one field in [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml): + +```yaml +model_name: Qwen/Qwen3.5-35B-A3B +``` + +For example: + +```yaml +model_name: openai/gpt-oss-120b +``` + +Outputs are written under `runs//`: + +- `baseline.json` for the original `train.py` +- `history.jsonl` for every accepted and rejected candidate +- `best/train.py` for the best discovered replacement +- `best/metrics.json` for the best run metadata +- `candidates/` for per-candidate isolated workspaces and logs + +### Config notes + +- `model_name` is fully configurable and passed through to the installed `discover` backend. +- `provider` and `api_base` can be set in the YAML or overridden on the CLI. +- `baseline_command_override` and `candidate_command_override` let you swap the execution command without changing code. +- `run_ttt_discover.py` uses the upstream `discover` trainer stack directly, but bypasses the public `discover()` model-name guard so non-GPT-OSS models such as Qwen can be used without changing the RL optimization recipe. 
+ ## Project structure ``` @@ -54,6 +105,8 @@ prepare.py — constants, data prep + runtime utilities (do not modify) train.py — model, optimizer, training loop (agent modifies this) program.md — agent instructions pyproject.toml — dependencies +run_ttt_discover.py — TTT-Discover entrypoint for outer-loop RL +ttt_autoresearch/ — thin autoresearch environment/reward adapter for discover ``` ## Design choices diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml new file mode 100644 index 00000000..fcd1f377 --- /dev/null +++ b/configs/ttt_discover_autoresearch.yaml @@ -0,0 +1,23 @@ +model_name: Qwen/Qwen3.5-35B-A3B +provider: null +api_base: null +max_steps: 8 +samples_per_step: 4 +temperature: 1.0 +timeout_sec: 2700 +run_dir: null +data_path: null +baseline_command_override: null +candidate_command_override: null +experiment_name: autoresearch-ttt-discover +renderer_name: null +learning_rate: 0.00004 +lora_rank: 32 +kl_penalty_coef: 0.1 +phase1_max_tokens: 26000 +save_every: 2 +wandb_project: autoresearch-ttt-discover +num_cpus_per_task: 0 +eval_timeout: 2700 +local_model_path: null +keep_history: 6 diff --git a/pyproject.toml b/pyproject.toml index 94ae3298..66d6b53f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "autoresearch" version = "0.1.0" description = "Autonomous pretraining research swarm" readme = "README.md" -requires-python = ">=3.10" +requires-python = ">=3.11" dependencies = [ "kernels>=0.11.7", "matplotlib>=3.10.8", @@ -13,6 +13,7 @@ dependencies = [ "requests>=2.32.0", "rustbpe>=0.1.0", "tiktoken>=0.11.0", + "ttt-discover @ git+https://github.com/test-time-training/discover@5df1a0ee9b04272ca33de0101ae64dd499e63f29", "torch==2.9.1", ] diff --git a/run_ttt_discover.py b/run_ttt_discover.py new file mode 100644 index 00000000..4a475411 --- /dev/null +++ b/run_ttt_discover.py @@ -0,0 +1,5 @@ +from ttt_autoresearch.cli import main + + +if __name__ == "__main__": + raise SystemExit(main()) diff 
--git a/tests/fixtures/fake_train.py b/tests/fixtures/fake_train.py new file mode 100644 index 00000000..57118190 --- /dev/null +++ b/tests/fixtures/fake_train.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from pathlib import Path +import json +import re +import sys +import time + + +VAL_RE = re.compile(r"#\s*val_bpb:\s*([-+]?(?:\d+\.?\d*|\.\d+))") +BEHAVIOR_RE = re.compile(r"#\s*behavior:\s*([a-z_]+)") + + +def main() -> int: + train_py = Path("train.py").read_text(encoding="utf-8") + behavior_match = BEHAVIOR_RE.search(train_py) + behavior = behavior_match.group(1) if behavior_match else "success" + if behavior == "timeout": + time.sleep(5) + return 0 + if behavior == "crash": + print("simulated crash", file=sys.stderr) + return 1 + if behavior == "missing_metric": + print("completed without metric") + return 0 + match = VAL_RE.search(train_py) + val_bpb = float(match.group(1)) if match else 1.0 + Path("metrics.json").write_text(json.dumps({"val_bpb": val_bpb}) + "\n", encoding="utf-8") + print("---") + print(f"val_bpb: {val_bpb:.6f}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py new file mode 100644 index 00000000..a919dc6a --- /dev/null +++ b/tests/test_cli_integration.py @@ -0,0 +1,124 @@ +from __future__ import annotations + +from pathlib import Path +import os +import sys +import tempfile +import types +import unittest + +from ttt_autoresearch import cli + + +class CliIntegrationTests(unittest.TestCase): + def test_resolve_config_path_falls_back_to_repo_root(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + repo_root = Path(tmpdir) + (repo_root / "configs").mkdir() + expected = repo_root / "configs" / "ttt_discover_autoresearch.yaml" + expected.write_text("model_name: Qwen/Qwen3.5-35B-A3B\n", encoding="utf-8") + + with tempfile.TemporaryDirectory() as other_tmp: + old_cwd = Path.cwd() + os.chdir(other_tmp) + try: + resolved = 
cli._resolve_config_path("configs/ttt_discover_autoresearch.yaml", repo_root) + finally: + os.chdir(old_cwd) + + self.assertEqual(resolved, expected.resolve()) + + def test_cli_wires_baseline_and_discover_entrypoint(self) -> None: + captured: dict[str, object] = {} + + fake_root = types.ModuleType("ttt_discover") + fake_rl = types.ModuleType("ttt_discover.rl") + fake_rl_train = types.ModuleType("ttt_discover.rl.train") + fake_utils = types.ModuleType("ttt_discover.tinker_utils") + fake_dataset_builder = types.ModuleType("ttt_discover.tinker_utils.dataset_builder") + + class FakeRLConfig: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + class FakeDatasetConfig: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + captured["dataset_config"] = kwargs + + def fake_get_single_problem_dataset_builder(config): + async def builder(): + captured["dataset_builder_called"] = True + return {"dataset_config": config} + + return builder + + async def fake_discover_main(cfg): + captured["rl_config"] = cfg.__dict__.copy() + await cfg.dataset_builder() + + fake_rl_train.Config = FakeRLConfig + fake_rl_train.main = fake_discover_main + fake_dataset_builder.DatasetConfig = FakeDatasetConfig + fake_dataset_builder.get_single_problem_dataset_builder = fake_get_single_problem_dataset_builder + + previous_modules = {name: sys.modules.get(name) for name in ( + "ttt_discover", + "ttt_discover.rl", + "ttt_discover.rl.train", + "ttt_discover.tinker_utils", + "ttt_discover.tinker_utils.dataset_builder", + )} + sys.modules["ttt_discover"] = fake_root + sys.modules["ttt_discover.rl"] = fake_rl + sys.modules["ttt_discover.rl.train"] = fake_rl_train + sys.modules["ttt_discover.tinker_utils"] = fake_utils + sys.modules["ttt_discover.tinker_utils.dataset_builder"] = fake_dataset_builder + + try: + with tempfile.TemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) + run_dir = tmp_path / "runs" / "cli-test" + config_path = tmp_path / "config.yaml" + 
config_path.write_text( + "\n".join( + [ + "model_name: Qwen/Qwen3.5-35B-A3B", + f"run_dir: {run_dir}", + "max_steps: 3", + "samples_per_step: 2", + "baseline_command_override:", + f" - {sys.executable}", + " - -c", + ' - "print(\'---\'); print(\'val_bpb: 1.000000\')"', + "candidate_command_override:", + f" - {sys.executable}", + " - -c", + ' - "print(\'---\'); print(\'val_bpb: 0.900000\')"', + "wandb_project: null", + ] + ) + + "\n", + encoding="utf-8", + ) + + exit_code = cli.main(["--config", str(config_path)]) + self.assertEqual(exit_code, 0) + self.assertTrue((run_dir / "baseline.json").exists()) + self.assertTrue((run_dir / "best" / "metrics.json").exists()) + self.assertTrue((run_dir / "resolved_config.json").exists()) + self.assertTrue(captured.get("dataset_builder_called")) + self.assertEqual(captured["rl_config"]["model_name"], "Qwen/Qwen3.5-35B-A3B") + self.assertEqual(captured["rl_config"]["num_epochs"], 3) + self.assertEqual(captured["dataset_config"]["group_size"], 2) + self.assertEqual(captured["dataset_config"]["problem_type"], "autoresearch") + finally: + for name, previous in previous_modules.items(): + if previous is None: + sys.modules.pop(name, None) + else: + sys.modules[name] = previous + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py new file mode 100644 index 00000000..0528b141 --- /dev/null +++ b/tests/test_env_smoke.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from pathlib import Path +import asyncio +import tempfile +import unittest +import sys + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.env import AutoResearchDiscoverEnv +from ttt_autoresearch.reward import AutoResearchRewardEvaluator +from ttt_autoresearch.runner import AutoResearchRunner + + +class EnvSmokeTests(unittest.TestCase): + def test_env_prompt_and_reward_flow(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / 
"program.md").write_text("Focus on val_bpb.", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("# val_bpb: 1.100000\n", encoding="utf-8") + fixtures = root / "tests" / "fixtures" + fixtures.mkdir(parents=True) + fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" + (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") + + config = TTTAutoResearchConfig( + timeout_sec=1, + candidate_command_override=[sys.executable, "tests/fixtures/fake_train.py"], + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.1) + AutoResearchDiscoverEnv.configure(bootstrap) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + + state = AutoResearchDiscoverEnv.create_initial_state("autoresearch") + env = AutoResearchDiscoverEnv(renderer=None, initial_state=state, sampler=None, config=type("Cfg", (), { + "problem_type": "autoresearch", + "log_path": str(bootstrap.discover_log_dir), + "eval_timeout": config.eval_timeout, + "num_cpus_per_task": 0, + })()) + + prompt = env.get_question() + self.assertIn("Current best val_bpb: 1.100000", prompt) + self.assertTrue(env.check_format('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}')) + + verify = asyncio.run(env.check_answer('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}', 0)) + self.assertGreater(verify.reward, 0.0) + next_state = env._create_next_state(0, '{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}', verify) + self.assertAlmostEqual(next_state.current_best_val_bpb, 0.9) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_reward.py b/tests/test_reward.py new file mode 100644 index 00000000..496360d5 --- /dev/null +++ b/tests/test_reward.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from pathlib import Path +import tempfile +import 
unittest +import sys + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.env import AutoResearchState +from ttt_autoresearch.reward import AutoResearchRewardEvaluator, reward_for_result +from ttt_autoresearch.runner import AutoResearchRunner, RunResult + + +class RewardTests(unittest.TestCase): + def test_reward_mapping(self) -> None: + result = RunResult( + status="success", + val_bpb=0.9, + stdout_path=Path("stdout.log"), + stderr_path=Path("stderr.log"), + elapsed_sec=1.0, + workspace_path=Path("."), + metrics_path=None, + command=["python", "train.py"], + returncode=0, + ) + reward, correctness = reward_for_result(1.0, result) + self.assertAlmostEqual(reward, 0.1) + self.assertEqual(correctness, 1.0) + + timeout_result = RunResult( + status="timeout", + val_bpb=None, + stdout_path=Path("stdout.log"), + stderr_path=Path("stderr.log"), + elapsed_sec=1.0, + workspace_path=Path("."), + metrics_path=None, + command=["python", "train.py"], + returncode=None, + ) + reward, correctness = reward_for_result(1.0, timeout_result) + self.assertEqual(reward, -0.5) + self.assertEqual(correctness, 0.0) + + def test_evaluator_uses_inner_metric_as_reward(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("# val_bpb: 1.000000\n", encoding="utf-8") + fixtures = root / "tests" / "fixtures" + fixtures.mkdir(parents=True) + fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" + (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") + + config = TTTAutoResearchConfig( + timeout_sec=1, + baseline_command_override=[sys.executable, "tests/fixtures/fake_train.py"], + candidate_command_override=[sys.executable, "tests/fixtures/fake_train.py"], + ).normalized(root) + runner = AutoResearchRunner(root, 
config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.0) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + evaluator = AutoResearchRewardEvaluator(problem_type="autoresearch", log_dir=str(bootstrap.run_dir)) + state = AutoResearchState( + timestep=-1, + construction=[], + code=(root / "train.py").read_text(encoding="utf-8"), + value=-1.0, + baseline_val_bpb=1.0, + current_best_val_bpb=1.0, + ) + payload = '{"summary":"improve","rationale":"lower loss","train_py":"# val_bpb: 0.900000\\n"}' + result = evaluator.get_reward(payload, state) + self.assertGreater(result["reward"], 0.0) + self.assertEqual(result["correctness"], 1.0) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_runner.py b/tests/test_runner.py new file mode 100644 index 00000000..b9e974fd --- /dev/null +++ b/tests/test_runner.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from pathlib import Path +import tempfile +import unittest +import sys +import os + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.runner import AutoResearchRunner, parse_patch_candidate, parse_val_bpb + + +class RunnerTests(unittest.TestCase): + def test_parse_candidate_rejects_unknown_keys(self) -> None: + with self.assertRaises(ValueError): + parse_patch_candidate('{"summary":"s","rationale":"r","train_py":"x","prepare_py":"bad"}') + + def test_parse_val_bpb(self) -> None: + stdout = "---\nval_bpb: 0.997900\n" + self.assertEqual(parse_val_bpb(stdout), 0.9979) + + def test_runner_reads_metric_and_status(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("# val_bpb: 1.250000\n", encoding="utf-8") + fixtures = root / "tests" / "fixtures" + fixtures.mkdir(parents=True) + fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" + 
(fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") + + config = TTTAutoResearchConfig( + timeout_sec=1, + baseline_command_override=[sys.executable, "tests/fixtures/fake_train.py"], + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.25) + result = runner.run_baseline(bootstrap=bootstrap) + self.assertEqual(result.status, "success") + self.assertAlmostEqual(result.val_bpb, 1.25) + + def test_config_normalizes_relative_paths_and_overrides_env(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + config = TTTAutoResearchConfig( + run_dir="relative-runs", + data_path="data/custom", + local_model_path="models/local", + provider="forced-provider", + api_base="https://example.invalid/v1", + ).normalized(root) + self.assertEqual(config.run_dir, str(root / "relative-runs")) + self.assertEqual(config.data_path, str(root / "data/custom")) + self.assertEqual(config.local_model_path, str(root / "models/local")) + + bootstrap = type("Bootstrap", (), {"config": config})() + from ttt_autoresearch.config import BootstrapContext + context = BootstrapContext( + repo_root=root, + run_dir=Path(config.run_dir), + config=config, + program_text="program", + baseline_train_py="train", + baseline_val_bpb=1.0, + ) + old_provider = os.environ.get("TINKER_PROVIDER") + old_base = os.environ.get("OPENAI_BASE_URL") + os.environ["TINKER_PROVIDER"] = "wrong-provider" + os.environ["OPENAI_BASE_URL"] = "https://wrong.invalid" + env = context.subprocess_env() + self.assertEqual(env["TINKER_PROVIDER"], "forced-provider") + self.assertEqual(env["OPENAI_BASE_URL"], "https://example.invalid/v1") + if old_provider is None: + os.environ.pop("TINKER_PROVIDER", None) + else: + os.environ["TINKER_PROVIDER"] = old_provider + if old_base is None: + os.environ.pop("OPENAI_BASE_URL", None) + else: + os.environ["OPENAI_BASE_URL"] = old_base + + +if __name__ == 
"__main__": + unittest.main() diff --git a/ttt_autoresearch/__init__.py b/ttt_autoresearch/__init__.py new file mode 100644 index 00000000..e6ff3fed --- /dev/null +++ b/ttt_autoresearch/__init__.py @@ -0,0 +1,17 @@ +from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig, load_config +from ttt_autoresearch.env import AutoResearchDiscoverEnv, AutoResearchState +from ttt_autoresearch.reward import AutoResearchRewardEvaluator +from ttt_autoresearch.runner import AutoResearchRunner, PatchCandidate, RunResult, parse_patch_candidate + +__all__ = [ + "AutoResearchDiscoverEnv", + "AutoResearchRewardEvaluator", + "AutoResearchRunner", + "AutoResearchState", + "BootstrapContext", + "PatchCandidate", + "RunResult", + "TTTAutoResearchConfig", + "load_config", + "parse_patch_candidate", +] diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py new file mode 100644 index 00000000..a31a8aaa --- /dev/null +++ b/ttt_autoresearch/cli.py @@ -0,0 +1,114 @@ +from __future__ import annotations + +import argparse +import asyncio +from pathlib import Path +import sys + +from ttt_autoresearch.config import TTTAutoResearchConfig, load_config, write_resolved_config +from ttt_autoresearch.env import AutoResearchDiscoverEnv +from ttt_autoresearch.reward import AutoResearchRewardEvaluator +from ttt_autoresearch.runner import AutoResearchRunner + + +def build_arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description="Run AutoResearch with TTT-Discover outer-loop RL.") + parser.add_argument("--config", default="configs/ttt_discover_autoresearch.yaml", help="Path to the YAML config file.") + parser.add_argument("--model-name", help="Override the outer-agent model name.") + parser.add_argument("--provider", help="Override the provider identifier passed via environment variables.") + parser.add_argument("--api-base", help="Override the API base URL passed via environment variables.") + parser.add_argument("--run-dir", help="Override the run 
output directory.") + return parser + + +def main(argv: list[str] | None = None) -> int: + parser = build_arg_parser() + args = parser.parse_args(argv) + repo_root = Path(__file__).resolve().parent.parent + config_path = _resolve_config_path(args.config, repo_root) + config = load_config(config_path, repo_root=repo_root) + config = _apply_overrides(config, args) + + try: + from ttt_discover.rl.train import Config as RLConfig, main as discover_main + from ttt_discover.tinker_utils.dataset_builder import DatasetConfig, get_single_problem_dataset_builder + except ImportError as exc: + parser.error( + "ttt-discover is not installed. Run `uv sync` after updating dependencies, " + "or install the pinned git dependency from pyproject.toml." + ) + raise AssertionError from exc + + run_dir = Path(config.run_dir) + runner = AutoResearchRunner(repo_root=repo_root, config=config, run_dir=run_dir) + bootstrap = runner.build_bootstrap(baseline_val_bpb=float("inf")) + baseline_result = runner.run_baseline(bootstrap=bootstrap) + if baseline_result.val_bpb is None: + parser.error(f"Baseline run failed with status={baseline_result.status}. 
Check {baseline_result.stdout_path} and {baseline_result.stderr_path}.") + + bootstrap = runner.build_bootstrap(baseline_val_bpb=baseline_result.val_bpb) + runner.initialize_best_from_baseline(baseline_result, bootstrap.baseline_train_py) + AutoResearchDiscoverEnv.configure(bootstrap) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + write_resolved_config(run_dir / "resolved_config.json", config) + + dataset_config = DatasetConfig( + env_type=AutoResearchDiscoverEnv, + problem_type="autoresearch", + batch_size=1, + group_size=config.samples_per_step, + model_name_for_tokenizer=config.local_model_path or config.model_name, + renderer_name=config.renderer_name, + num_cpus_per_task=config.num_cpus_per_task, + eval_timeout=config.eval_timeout, + log_path=str(bootstrap.discover_log_dir), + ) + dataset_builder = get_single_problem_dataset_builder(dataset_config) + # Keep discover's RL recipe unchanged and only swap in the autoresearch task surface. + rl_config = RLConfig( + env_type=AutoResearchDiscoverEnv, + problem_type="autoresearch", + learning_rate=config.learning_rate, + dataset_builder=dataset_builder, + model_name=config.model_name, + num_epochs=config.max_steps, + temperature=config.temperature, + lora_rank=config.lora_rank, + wandb_project=config.wandb_project, + wandb_name=config.experiment_name, + log_path=str(bootstrap.discover_log_dir), + kl_penalty_coef=config.kl_penalty_coef, + save_every=config.save_every, + remove_constant_reward_groups=True, + phase1_max_tokens=config.phase1_max_tokens, + local_model_path=config.local_model_path, + ) + asyncio.run(discover_main(rl_config)) + return 0 + + +def _resolve_config_path(config_arg: str, repo_root: Path) -> Path: + candidate = Path(config_arg).expanduser() + if candidate.is_absolute(): + return candidate + if candidate.exists(): + return candidate.resolve() + return (repo_root / candidate).resolve() + + +def _apply_overrides(config: TTTAutoResearchConfig, args: argparse.Namespace) -> 
TTTAutoResearchConfig: + updated = config.to_dict() + if args.model_name: + updated["model_name"] = args.model_name + updated["renderer_name"] = None + if args.provider: + updated["provider"] = args.provider + if args.api_base: + updated["api_base"] = args.api_base + if args.run_dir: + updated["run_dir"] = args.run_dir + return TTTAutoResearchConfig(**updated).normalized(Path(__file__).resolve().parent.parent) + + +if __name__ == "__main__": + raise SystemExit(main(sys.argv[1:])) diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py new file mode 100644 index 00000000..a550a325 --- /dev/null +++ b/ttt_autoresearch/config.py @@ -0,0 +1,211 @@ +from __future__ import annotations + +from dataclasses import asdict, dataclass +from datetime import datetime +from pathlib import Path +import json +import os +import shlex +from typing import Any + + +DISCOVER_GIT_REV = "5df1a0ee9b04272ca33de0101ae64dd499e63f29" + + +@dataclass(slots=True) +class TTTAutoResearchConfig: + model_name: str = "Qwen/Qwen3.5-35B-A3B" + provider: str | None = None + api_base: str | None = None + max_steps: int = 8 + samples_per_step: int = 4 + temperature: float = 1.0 + timeout_sec: int = 2700 + run_dir: str | None = None + data_path: str | None = None + baseline_command_override: list[str] | None = None + candidate_command_override: list[str] | None = None + experiment_name: str | None = None + renderer_name: str | None = None + learning_rate: float = 4e-5 + lora_rank: int = 32 + kl_penalty_coef: float = 0.1 + phase1_max_tokens: int = 26000 + save_every: int = 2 + wandb_project: str | None = "autoresearch-ttt-discover" + num_cpus_per_task: int = 0 + eval_timeout: int | None = None + local_model_path: str | None = None + keep_history: int = 6 + + def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": + run_dir = _resolve_path(self.run_dir, repo_root) if self.run_dir else repo_root / "runs" / datetime.now().strftime("%Y%m%d_%H%M%S") + experiment_name = self.experiment_name 
or run_dir.name + return TTTAutoResearchConfig( + model_name=self.model_name, + provider=self.provider, + api_base=self.api_base, + max_steps=self.max_steps, + samples_per_step=self.samples_per_step, + temperature=self.temperature, + timeout_sec=self.timeout_sec, + run_dir=str(run_dir), + data_path=_resolve_optional_path_str(self.data_path, repo_root), + baseline_command_override=_normalize_command(self.baseline_command_override), + candidate_command_override=_normalize_command(self.candidate_command_override), + experiment_name=experiment_name, + renderer_name=self.renderer_name or infer_renderer_name(self.model_name), + learning_rate=self.learning_rate, + lora_rank=self.lora_rank, + kl_penalty_coef=self.kl_penalty_coef, + phase1_max_tokens=self.phase1_max_tokens, + save_every=self.save_every, + wandb_project=self.wandb_project, + num_cpus_per_task=self.num_cpus_per_task, + eval_timeout=self.eval_timeout or self.timeout_sec, + local_model_path=_resolve_optional_path_str(self.local_model_path, repo_root), + keep_history=self.keep_history, + ) + + def to_dict(self) -> dict[str, Any]: + return asdict(self) + + +@dataclass(frozen=True, slots=True) +class BootstrapContext: + repo_root: Path + run_dir: Path + config: TTTAutoResearchConfig + program_text: str + baseline_train_py: str + baseline_val_bpb: float + + @property + def history_path(self) -> Path: + return self.run_dir / "history.jsonl" + + @property + def best_dir(self) -> Path: + return self.run_dir / "best" + + @property + def discover_log_dir(self) -> Path: + return self.run_dir / "discover_log" + + @property + def candidates_dir(self) -> Path: + return self.run_dir / "candidates" + + def subprocess_env(self) -> dict[str, str]: + env = dict(os.environ) + if self.config.provider: + env["TINKER_PROVIDER"] = self.config.provider + if self.config.api_base: + env["OPENAI_BASE_URL"] = self.config.api_base + env["OPENAI_API_BASE"] = self.config.api_base + env["TINKER_BASE_URL"] = self.config.api_base + if 
self.config.data_path: + env["AUTORESEARCH_DATA_PATH"] = self.config.data_path + return env + + +def infer_renderer_name(model_name: str) -> str: + lowered = model_name.lower() + if "qwen" in lowered: + if "instruct" in lowered: + return "qwen3_instruct" + return "qwen3" + if "gpt-oss" in lowered: + return "gpt_oss_high_reasoning" + return "qwen3" + + +def load_config(path: str | os.PathLike[str], repo_root: str | os.PathLike[str] | None = None) -> TTTAutoResearchConfig: + raw = _load_yaml_like(Path(path)) + config = TTTAutoResearchConfig(**raw) + return config.normalized(Path(repo_root) if repo_root else Path.cwd()) + + +def write_resolved_config(path: Path, config: TTTAutoResearchConfig) -> None: + path.write_text(json.dumps(config.to_dict(), indent=2, sort_keys=True) + "\n", encoding="utf-8") + + +def _normalize_command(command: list[str] | str | None) -> list[str] | None: + if command is None: + return None + if isinstance(command, str): + return shlex.split(command) + return [str(part) for part in command] + + +def _resolve_path(path_value: str | os.PathLike[str], repo_root: Path) -> Path: + path = Path(path_value).expanduser() + if path.is_absolute(): + return path + return repo_root / path + + +def _resolve_optional_path_str(path_value: str | os.PathLike[str] | None, repo_root: Path) -> str | None: + if path_value is None: + return None + return str(_resolve_path(path_value, repo_root)) + + +def _coerce_scalar(value: str) -> Any: + lowered = value.lower() + if lowered in {"null", "none"}: + return None + if lowered == "true": + return True + if lowered == "false": + return False + if value.startswith(("'", '"')) and value.endswith(("'", '"')) and len(value) >= 2: + return value[1:-1] + try: + if "." 
in value: + return float(value) + return int(value) + except ValueError: + return value + + +def _load_yaml_like(path: Path) -> dict[str, Any]: + text = path.read_text(encoding="utf-8") + try: + import yaml + + loaded = yaml.safe_load(text) or {} + if not isinstance(loaded, dict): + raise ValueError(f"{path} must contain a top-level mapping.") + return loaded + except ImportError: + return _parse_minimal_yaml(text) + + +def _parse_minimal_yaml(text: str) -> dict[str, Any]: + result: dict[str, Any] = {} + current_key: str | None = None + current_list: list[Any] | None = None + + for raw_line in text.splitlines(): + line = raw_line.split("#", 1)[0].rstrip() + if not line.strip(): + continue + if line.startswith(" - ") and current_key is not None: + if current_list is None: + current_list = [] + result[current_key] = current_list + current_list.append(_coerce_scalar(line[4:].strip())) + continue + if ":" not in line: + raise ValueError(f"Unsupported config line: {raw_line}") + key, value = line.split(":", 1) + current_key = key.strip() + current_list = None + value = value.strip() + if value == "": + result[current_key] = [] + current_list = result[current_key] + else: + result[current_key] = _coerce_scalar(value) + return result diff --git a/ttt_autoresearch/discover_compat.py b/ttt_autoresearch/discover_compat.py new file mode 100644 index 00000000..3e87eaa1 --- /dev/null +++ b/ttt_autoresearch/discover_compat.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +import uuid +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from typing import Any + +try: + from ttt_discover import BaseRewardEvaluator, Environment, State + from ttt_discover.tinker_utils.dataset_builder import VerifyResult +except ImportError: + class BaseRewardEvaluator(ABC): + @abstractmethod + def get_reward(self, code: str, state: Any) -> dict[str, Any]: + raise NotImplementedError + + class State: + def __init__( + self, + timestep: int, + construction: 
list[Any] | None, + code: str, + value: float | None = None, + parent_values: list[float] | None = None, + parents: list[dict[str, Any]] | None = None, + id: str | None = None, + observation: str = "", + ) -> None: + self.id = id or str(uuid.uuid4()) + self.timestep = timestep + self.construction = construction or [] + self.code = code + self.value = value + self.parent_values = parent_values or [] + self.parents = parents or [] + self.observation = observation + + def to_dict(self) -> dict[str, Any]: + return { + "type": self.__class__.__name__, + "id": self.id, + "timestep": self.timestep, + "value": self.value, + "construction": self.construction, + "code": self.code, + "parent_values": self.parent_values, + "parents": self.parents, + "observation": self.observation, + } + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> "State": + return cls( + timestep=data["timestep"], + construction=data.get("construction"), + code=data["code"], + value=data.get("value"), + parent_values=data.get("parent_values"), + parents=data.get("parents"), + id=data.get("id"), + observation=data.get("observation", ""), + ) + + @dataclass + class VerifyResult: + reward: float + msg: str + correctness: float + raw_score: float + result_construction: Any + stdout: str + metrics: dict[str, Any] = field(default_factory=dict) + + class Environment: + reward_function: type[BaseRewardEvaluator] + state_type: type[State] + + def __init__(self, renderer: Any, initial_state: State, sampler: Any, config: Any) -> None: + self.renderer = renderer + self.initial_state = initial_state + self.state = initial_state + self.sampler = sampler + self.config = config + self.problem_type = getattr(config, "problem_type", "autoresearch") + self.log_path = getattr(config, "log_path", "") + self.eval_timeout = getattr(config, "eval_timeout", 0) + self.num_cpus_per_task = getattr(config, "num_cpus_per_task", 0) + diff --git a/ttt_autoresearch/env.py b/ttt_autoresearch/env.py new file mode 100644 index 
00000000..b53e746f --- /dev/null +++ b/ttt_autoresearch/env.py @@ -0,0 +1,294 @@ +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +import asyncio +import json +from typing import Any, ClassVar + +from ttt_autoresearch.config import BootstrapContext +from ttt_autoresearch.discover_compat import Environment, State, VerifyResult +from ttt_autoresearch.reward import AutoResearchRewardEvaluator +from ttt_autoresearch.runner import parse_patch_candidate + + +def read_recent_history(history_path: Path, limit: int) -> list[dict[str, Any]]: + if not history_path.exists(): + return [] + entries: list[dict[str, Any]] = [] + for line in history_path.read_text(encoding="utf-8").splitlines(): + if not line.strip(): + continue + try: + entries.append(json.loads(line)) + except json.JSONDecodeError: + continue + return entries[-limit:] + + +class AutoResearchState(State): + def __init__( + self, + timestep: int, + construction: list[Any] | None, + code: str, + value: float | None = None, + parent_values: list[float] | None = None, + parents: list[dict[str, Any]] | None = None, + id: str | None = None, + observation: str = "", + baseline_val_bpb: float | None = None, + current_best_val_bpb: float | None = None, + history: list[dict[str, Any]] | None = None, + summary: str = "", + rationale: str = "", + raw_score: float | None = None, + ) -> None: + super().__init__( + timestep=timestep, + construction=construction or [], + code=code, + value=value, + parent_values=parent_values, + parents=parents, + id=id, + observation=observation, + ) + self.baseline_val_bpb = baseline_val_bpb + self.current_best_val_bpb = current_best_val_bpb + self.history = history or [] + self.summary = summary + self.rationale = rationale + self.raw_score = raw_score + + @property + def step(self) -> int: + return self.timestep + + @property + def current_train_py(self) -> str: + return self.code + + def to_dict(self) -> dict[str, Any]: + payload = 
super().to_dict() + payload.update( + { + "type": self.__class__.__name__, + "baseline_val_bpb": self.baseline_val_bpb, + "current_best_val_bpb": self.current_best_val_bpb, + "history": self.history, + "summary": self.summary, + "rationale": self.rationale, + "raw_score": self.raw_score, + } + ) + return payload + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> "AutoResearchState": + return cls( + timestep=data["timestep"], + construction=data.get("construction"), + code=data["code"], + value=data.get("value"), + parent_values=data.get("parent_values"), + parents=data.get("parents"), + id=data.get("id"), + observation=data.get("observation", ""), + baseline_val_bpb=data.get("baseline_val_bpb"), + current_best_val_bpb=data.get("current_best_val_bpb"), + history=data.get("history"), + summary=data.get("summary", ""), + rationale=data.get("rationale", ""), + raw_score=data.get("raw_score"), + ) + + +class AutoResearchDiscoverEnv(Environment): + reward_function = AutoResearchRewardEvaluator + state_type = AutoResearchState + bootstrap: ClassVar[BootstrapContext | None] = None + + @classmethod + def configure(cls, bootstrap: BootstrapContext) -> None: + cls.bootstrap = bootstrap + + @classmethod + def create_initial_state(cls, problem_type: str) -> AutoResearchState: + if cls.bootstrap is None: + raise RuntimeError("AutoResearchDiscoverEnv is not configured.") + baseline_stdout = "" + baseline_stdout_path = cls.bootstrap.run_dir / "baseline" / "workspace" / "stdout.log" + if baseline_stdout_path.exists(): + baseline_stdout = baseline_stdout_path.read_text(encoding="utf-8")[:4000] + # Read the actual current best from disk — it may have improved + # across prior RL steps since the baseline was established. 
+ current_best = cls.bootstrap.baseline_val_bpb + best_metrics = cls.bootstrap.best_dir / "metrics.json" + if best_metrics.exists(): + try: + stored = json.loads(best_metrics.read_text(encoding="utf-8")) + if stored.get("val_bpb") is not None: + current_best = float(stored["val_bpb"]) + except (json.JSONDecodeError, KeyError, ValueError): + pass + # Read the best train.py if it has been updated. + best_train_py = cls.bootstrap.baseline_train_py + best_train_path = cls.bootstrap.best_dir / "train.py" + if best_train_path.exists(): + best_train_py = best_train_path.read_text(encoding="utf-8") + return AutoResearchState( + timestep=-1, + construction=[], + code=best_train_py, + value=-current_best, + observation=baseline_stdout, + baseline_val_bpb=cls.bootstrap.baseline_val_bpb, + current_best_val_bpb=current_best, + history=[], + summary="baseline", + rationale="seed state from the original autoresearch train.py", + raw_score=current_best, + ) + + def is_maximize(self) -> bool: + return False + + def _get_code_languages(self) -> list[str]: + return ["json"] + + def _should_keep_code_separators(self) -> bool: + return False + + def get_question(self) -> str: + if self.bootstrap is None: + raise RuntimeError("AutoResearchDiscoverEnv is not configured.") + + state = self.initial_state + history = read_recent_history(self.bootstrap.history_path, self.bootstrap.config.keep_history) + if history: + history_text = "\n".join( + f"- [{entry['status']}] reward={entry['reward']:.6f} val_bpb={entry.get('val_bpb')} summary={entry['summary']}" + for entry in history + ) + else: + history_text = "- No prior candidate evaluations yet." + + return f"""{self.bootstrap.program_text} + +You are the outer autoresearch agent. Your only job is to improve train.py. +You may edit train.py only. Do not modify prepare.py, program.md, or any other file. +The reward comes from running train.py and measuring val_bpb. Lower val_bpb is better. 
+ +Current best val_bpb: {state.current_best_val_bpb:.6f} +Baseline val_bpb: {state.baseline_val_bpb:.6f} + +Current best train.py: +```python +{state.current_train_py} +``` + +Recent accepted and rejected edits: +{history_text} + +Return exactly one ```json``` block with this schema: +{{ + "summary": "short description of the change", + "rationale": "why this should improve val_bpb", + "train_py": "the full replacement contents of train.py" +}} + +Rules: +- train_py must be the full file, not a diff. +- Only edit train.py. +- Keep the file runnable as a standalone script. +- Optimize for the lowest val_bpb under the existing time budget. +""" + + def check_format(self, parsed_code: str) -> bool: + try: + parse_patch_candidate(parsed_code) + except ValueError: + return False + return True + + async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: + if not self.check_format(parsed_code): + return VerifyResult( + reward=-1.0, + msg="Invalid candidate JSON.", + correctness=0.0, + raw_score=float(self.initial_state.current_best_val_bpb), + result_construction=[], + stdout="", + metrics={"candidate_status": "invalid_candidate"}, + ) + + loop = asyncio.get_running_loop() + out = await loop.run_in_executor(None, self._run_reward, parsed_code) + return VerifyResult( + reward=out["reward"], + msg=out["msg"], + correctness=out["correctness"], + raw_score=out["raw_score"], + result_construction=out.get("result_construction", []), + stdout=out.get("stdout", ""), + metrics=out.get("metrics", {}), + ) + + def _create_next_state(self, step_idx: int, parsed_code: str, outs: VerifyResult) -> AutoResearchState: + candidate = parse_patch_candidate(parsed_code) + history_entry = { + "step": step_idx, + "summary": candidate.summary, + "rationale": candidate.rationale, + "reward": outs.reward, + "val_bpb": outs.raw_score, + "status": outs.metrics.get("candidate_status", "unknown"), + } + prior_history = list(getattr(self.initial_state, "history", [])) + next_history 
= (prior_history + [history_entry])[-10:] + parent_best = self.initial_state.current_best_val_bpb + new_best = min(parent_best, outs.raw_score) if outs.raw_score is not None else parent_best + return AutoResearchState( + timestep=step_idx, + construction=[], + code=candidate.train_py, + value=-outs.raw_score, + observation=outs.stdout, + baseline_val_bpb=self.initial_state.baseline_val_bpb, + current_best_val_bpb=new_best, + history=next_history, + summary=candidate.summary, + rationale=candidate.rationale, + raw_score=outs.raw_score, + ) + + def _build_metrics( + self, + outs: VerifyResult, + correct_format: bool, + message: dict[str, Any], + parsed_code: str, + ) -> dict[str, Any]: + metrics = { + "format": correct_format, + "reward": outs.reward, + "correctness": outs.correctness, + "raw_score": outs.raw_score, + "prompt": self.get_question(), + "response": message["content"], + "parsed_code": parsed_code, + "msg": outs.msg, + } + metrics.update(outs.metrics) + return metrics + + def _run_reward(self, parsed_code: str) -> dict[str, Any]: + evaluator = self.reward_function( + problem_type=self.problem_type, + log_dir=self.log_path, + eval_timeout=self.eval_timeout, + num_cpus_per_task=self.num_cpus_per_task, + ) + return evaluator.get_reward(parsed_code, state=self.initial_state) diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py new file mode 100644 index 00000000..a0ce0416 --- /dev/null +++ b/ttt_autoresearch/reward.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from pathlib import Path +import threading +from typing import Any + +from ttt_autoresearch.config import BootstrapContext +from ttt_autoresearch.discover_compat import BaseRewardEvaluator +from ttt_autoresearch.runner import AutoResearchRunner, PatchCandidate, RunResult, parse_patch_candidate + + +_ARTIFACT_LOCK = threading.Lock() + + +def reward_for_result(current_best_val_bpb: float, result: RunResult) -> tuple[float, float]: + if result.status == "timeout": + return 
-0.5, 0.0 + if result.status == "missing_metric": + return -0.75, 0.0 + if result.status != "success" or result.val_bpb is None: + return -1.0, 0.0 + reward = current_best_val_bpb - result.val_bpb + correctness = 1.0 if reward > 0 else 0.0 + return reward, correctness + + +class AutoResearchRewardEvaluator(BaseRewardEvaluator): + bootstrap: BootstrapContext | None = None + runner: AutoResearchRunner | None = None + + @classmethod + def configure(cls, bootstrap: BootstrapContext, runner: AutoResearchRunner) -> None: + cls.bootstrap = bootstrap + cls.runner = runner + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.problem_type = kwargs.get("problem_type", "autoresearch") + self.log_dir = kwargs.get("log_dir") + self.eval_timeout = kwargs.get("eval_timeout") + self.num_cpus_per_task = kwargs.get("num_cpus_per_task") + + def get_reward(self, code: str, state: Any) -> dict[str, Any]: + if self.bootstrap is None or self.runner is None: + raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + + try: + candidate = parse_patch_candidate(code) + except ValueError as exc: + return self._failure_payload( + reward=-1.0, + raw_score=self._current_best_from_state(state), + msg=f"Invalid candidate payload: {exc}", + status="invalid_candidate", + ) + + result = self.runner.run_candidate( + bootstrap=self.bootstrap, + candidate=candidate, + step=getattr(state, "timestep", -1) + 1, + state_id=getattr(state, "id", "unknown"), + ) + current_best = self._current_best_from_state(state) + reward, correctness = reward_for_result(current_best, result) + improved_global_best = False + + with _ARTIFACT_LOCK: + if result.status == "success" and result.val_bpb is not None: + improved_global_best = self.runner.update_best( + train_py_text=candidate.train_py, + result=result, + summary=candidate.summary, + rationale=candidate.rationale, + ) + history_entry = { + "step": getattr(state, "timestep", -1) + 1, + "state_id": getattr(state, "id", "unknown"), + "status": 
result.status, + "summary": candidate.summary, + "rationale": candidate.rationale, + "reward": reward, + "accepted": bool(correctness), + "val_bpb": result.val_bpb, + "parent_val_bpb": current_best, + "stdout_path": str(result.stdout_path), + "stderr_path": str(result.stderr_path), + "workspace_path": str(result.workspace_path), + "improved_global_best": improved_global_best, + } + self.runner.append_history(history_entry) + + message = self._build_message(candidate, result, current_best, reward) + stdout = self.runner.read_text(result.stdout_path) + raw_score = result.val_bpb if result.val_bpb is not None else current_best + return { + "reward": float(reward), + "msg": message, + "correctness": float(correctness), + "raw_score": float(raw_score), + "result_construction": [], + "stdout": stdout, + "metrics": { + "candidate_summary": candidate.summary, + "candidate_rationale": candidate.rationale, + "candidate_status": result.status, + "candidate_val_bpb": result.val_bpb, + "workspace_path": str(result.workspace_path), + "stdout_path": str(result.stdout_path), + "stderr_path": str(result.stderr_path), + "improved_global_best": improved_global_best, + }, + } + + @staticmethod + def _build_message(candidate: PatchCandidate, result: RunResult, current_best: float, reward: float) -> str: + val_bpb = "n/a" if result.val_bpb is None else f"{result.val_bpb:.6f}" + return ( + f"{candidate.summary}\n" + f"status={result.status} parent_val_bpb={current_best:.6f} " + f"candidate_val_bpb={val_bpb} reward={reward:.6f}" + ) + + @staticmethod + def _current_best_from_state(state: Any) -> float: + current_best = getattr(state, "current_best_val_bpb", None) + if current_best is not None: + return float(current_best) + value = getattr(state, "value", None) + if value is None: + raise RuntimeError("State is missing current_best_val_bpb and value.") + return float(-value) + + @staticmethod + def _failure_payload(reward: float, raw_score: float, msg: str, status: str) -> dict[str, Any]: 
+ return { + "reward": float(reward), + "msg": msg, + "correctness": 0.0, + "raw_score": float(raw_score), + "result_construction": [], + "stdout": "", + "metrics": { + "candidate_status": status, + }, + } diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py new file mode 100644 index 00000000..ccc0faa6 --- /dev/null +++ b/ttt_autoresearch/runner.py @@ -0,0 +1,288 @@ +from __future__ import annotations + +from dataclasses import asdict, dataclass +from pathlib import Path +import json +import os +import re +import shutil +import subprocess +import sys +import time +import uuid +from typing import Any + +from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig + + +VAL_BPB_RE = re.compile(r"^val_bpb:\s*([-+]?(?:\d+\.?\d*|\.\d+)(?:[eE][-+]?\d+)?)", re.MULTILINE) +ALLOWED_CANDIDATE_KEYS = {"summary", "rationale", "train_py"} + + +@dataclass(slots=True) +class PatchCandidate: + summary: str + rationale: str + train_py: str + + +@dataclass(slots=True) +class RunResult: + status: str + val_bpb: float | None + stdout_path: Path + stderr_path: Path + elapsed_sec: float + workspace_path: Path + metrics_path: Path | None + command: list[str] + returncode: int | None + + def to_dict(self) -> dict[str, Any]: + data = asdict(self) + data["stdout_path"] = str(self.stdout_path) + data["stderr_path"] = str(self.stderr_path) + data["workspace_path"] = str(self.workspace_path) + data["metrics_path"] = str(self.metrics_path) if self.metrics_path else None + return data + + +def parse_patch_candidate(candidate_json: str) -> PatchCandidate: + try: + payload = json.loads(candidate_json) + except json.JSONDecodeError as exc: + raise ValueError(f"Candidate must be valid JSON: {exc}") from exc + if not isinstance(payload, dict): + raise ValueError("Candidate payload must be a JSON object.") + unknown_keys = set(payload) - ALLOWED_CANDIDATE_KEYS + if unknown_keys: + raise ValueError(f"Candidate may only contain {sorted(ALLOWED_CANDIDATE_KEYS)}. 
Found {sorted(unknown_keys)}.") + missing = [key for key in ("summary", "rationale", "train_py") if key not in payload] + if missing: + raise ValueError(f"Candidate is missing required keys: {missing}.") + summary = payload["summary"] + rationale = payload["rationale"] + train_py = payload["train_py"] + if not all(isinstance(value, str) for value in (summary, rationale, train_py)): + raise ValueError("Candidate fields summary, rationale, and train_py must all be strings.") + if not train_py.strip(): + raise ValueError("train_py must contain the full replacement file.") + return PatchCandidate(summary=summary.strip(), rationale=rationale.strip(), train_py=train_py) + + +def parse_val_bpb(stdout: str) -> float | None: + match = VAL_BPB_RE.search(stdout) + if not match: + return None + return float(match.group(1)) + + +class AutoResearchRunner: + def __init__(self, repo_root: Path, config: TTTAutoResearchConfig, run_dir: Path) -> None: + self.repo_root = repo_root + self.config = config + self.run_dir = run_dir + self.run_dir.mkdir(parents=True, exist_ok=True) + (self.run_dir / "baseline").mkdir(exist_ok=True) + (self.run_dir / "candidates").mkdir(exist_ok=True) + (self.run_dir / "best").mkdir(exist_ok=True) + + def build_bootstrap(self, baseline_val_bpb: float) -> BootstrapContext: + program_text = (self.repo_root / "program.md").read_text(encoding="utf-8") + baseline_train_py = (self.repo_root / "train.py").read_text(encoding="utf-8") + return BootstrapContext( + repo_root=self.repo_root, + run_dir=self.run_dir, + config=self.config, + program_text=program_text, + baseline_train_py=baseline_train_py, + baseline_val_bpb=baseline_val_bpb, + ) + + def run_baseline(self, bootstrap: BootstrapContext | None = None) -> RunResult: + workspace = self.run_dir / "baseline" / "workspace" + self._copy_repo(workspace) + result = self._execute_workspace( + workspace=workspace, + command_template=self.config.baseline_command_override, + bootstrap=bootstrap, + label="baseline", + ) 
+ self._write_json(self.run_dir / "baseline.json", result.to_dict()) + return result + + def run_candidate( + self, + bootstrap: BootstrapContext, + candidate: PatchCandidate, + step: int, + state_id: str, + ) -> RunResult: + workspace = self.run_dir / "candidates" / f"{step:04d}_{uuid.uuid4().hex[:8]}" + self._copy_repo(workspace) + (workspace / "train.py").write_text(candidate.train_py, encoding="utf-8") + result = self._execute_workspace( + workspace=workspace, + command_template=self.config.candidate_command_override, + bootstrap=bootstrap, + label=f"candidate-{step:04d}", + state_id=state_id, + ) + return result + + def initialize_best_from_baseline(self, baseline_result: RunResult, train_py_text: str) -> None: + if baseline_result.val_bpb is None: + return + self.update_best(train_py_text, baseline_result, summary="baseline", rationale="seed baseline") + + def update_best(self, train_py_text: str, result: RunResult, summary: str, rationale: str) -> bool: + if result.val_bpb is None: + return False + best_metrics_path = self.run_dir / "best" / "metrics.json" + current_best = self.read_best_val_bpb() + if current_best is not None and result.val_bpb >= current_best: + return False + (self.run_dir / "best" / "train.py").write_text(train_py_text, encoding="utf-8") + self._write_json( + best_metrics_path, + { + "summary": summary, + "rationale": rationale, + "val_bpb": result.val_bpb, + "status": result.status, + "stdout_path": str(result.stdout_path), + "stderr_path": str(result.stderr_path), + "workspace_path": str(result.workspace_path), + "elapsed_sec": result.elapsed_sec, + }, + ) + return True + + def read_best_val_bpb(self) -> float | None: + best_metrics_path = self.run_dir / "best" / "metrics.json" + if not best_metrics_path.exists(): + return None + data = json.loads(best_metrics_path.read_text(encoding="utf-8")) + value = data.get("val_bpb") + return float(value) if value is not None else None + + def append_history(self, entry: dict[str, Any]) -> None: 
+ history_path = self.run_dir / "history.jsonl" + with history_path.open("a", encoding="utf-8") as handle: + handle.write(json.dumps(entry, sort_keys=True) + "\n") + + def _copy_repo(self, workspace: Path) -> None: + if workspace.exists(): + shutil.rmtree(workspace) + shutil.copytree( + self.repo_root, + workspace, + ignore=shutil.ignore_patterns(".git", "runs", "__pycache__", ".pytest_cache", ".venv", "*.pyc", "*.pyo"), + ) + + def _execute_workspace( + self, + workspace: Path, + command_template: list[str] | None, + bootstrap: BootstrapContext | None, + label: str, + state_id: str | None = None, + ) -> RunResult: + command = self._resolve_command(command_template, workspace, bootstrap, label, state_id) + env = bootstrap.subprocess_env() if bootstrap else dict(os.environ) + stdout_path = workspace / "stdout.log" + stderr_path = workspace / "stderr.log" + metrics_path = workspace / "metrics.json" + start = time.time() + try: + proc = subprocess.run( + command, + cwd=workspace, + env=env, + timeout=self.config.timeout_sec, + text=True, + capture_output=True, + check=False, + ) + stdout = proc.stdout + stderr = proc.stderr + returncode = proc.returncode + status = "success" if returncode == 0 else "crash" + except subprocess.TimeoutExpired as exc: + stdout = exc.stdout or "" + stderr = exc.stderr or "" + returncode = None + status = "timeout" + elapsed_sec = time.time() - start + stdout_path.write_text(stdout, encoding="utf-8") + stderr_path.write_text(stderr, encoding="utf-8") + + val_bpb = self._read_val_bpb(stdout, metrics_path) + if status == "success" and val_bpb is None: + status = "missing_metric" + + if val_bpb is not None and metrics_path.exists(): + metrics = json.loads(metrics_path.read_text(encoding="utf-8")) + else: + metrics = {"val_bpb": val_bpb} + self._write_json(metrics_path, metrics) + + return RunResult( + status=status, + val_bpb=val_bpb, + stdout_path=stdout_path, + stderr_path=stderr_path, + elapsed_sec=elapsed_sec, + workspace_path=workspace, 
+ metrics_path=metrics_path, + command=command, + returncode=returncode, + ) + + def _read_val_bpb(self, stdout: str, metrics_path: Path) -> float | None: + direct = parse_val_bpb(stdout) + if direct is not None: + return direct + if metrics_path.exists(): + try: + payload = json.loads(metrics_path.read_text(encoding="utf-8")) + except json.JSONDecodeError: + return None + value = payload.get("val_bpb") + return float(value) if value is not None else None + return None + + def _resolve_command( + self, + command_template: list[str] | None, + workspace: Path, + bootstrap: BootstrapContext | None, + label: str, + state_id: str | None, + ) -> list[str]: + template = command_template or [sys.executable, "train.py"] + values = { + "workspace": str(workspace), + "repo_root": str(self.repo_root), + "run_dir": str(self.run_dir), + "label": label, + "state_id": state_id or "", + "data_path": bootstrap.config.data_path if bootstrap and bootstrap.config.data_path else "", + } + resolved = [] + for part in template: + for key, val in values.items(): + part = part.replace("{" + key + "}", val) + resolved.append(part) + return resolved + + @staticmethod + def read_text(path: Path, max_chars: int = 4000) -> str: + text = path.read_text(encoding="utf-8") if path.exists() else "" + if len(text) <= max_chars: + return text + return text[:max_chars] + "\n...(truncated)...\n" + + @staticmethod + def _write_json(path: Path, payload: dict[str, Any]) -> None: + path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") diff --git a/uv.lock b/uv.lock index c840d62f..027f6a81 100644 --- a/uv.lock +++ b/uv.lock @@ -1,21 +1,146 @@ version = 1 revision = 3 -requires-python = ">=3.10" +requires-python = ">=3.11" resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", "python_full_version >= '3.14' and sys_platform == 'win32'", 
"python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and sys_platform == 'linux'", + "python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'emscripten'", - "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.11' and sys_platform != 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.12' and sys_platform == 'win32'", + "python_full_version < '3.12' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and sys_platform == 'darwin'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform != 'darwin' and sys_platform != 'emscripten' 
and sys_platform != 'linux' and sys_platform != 'win32'", +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" }, + { url = "https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = 
"2026-01-03T17:29:44.822Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" }, + { url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" }, + { url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 1899449, upload-time = "2026-01-03T17:29:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" }, + { url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" }, + { url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = "2026-01-03T17:30:05.132Z" }, + { url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 
1804590, upload-time = "2026-01-03T17:30:07.135Z" }, + { url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, + { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, + { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, + { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020, upload-time = "2026-01-03T17:30:26Z" }, + { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, + { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, + { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, + { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = "2026-01-03T17:30:36.864Z" }, + { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" }, + { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = 
"2026-01-03T17:30:41.081Z" }, + { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, + { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, + { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, + { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, + { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, + { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" }, + { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = 
"2026-01-03T17:31:14.382Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, + { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, + { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, + { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, + { url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, + { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, + { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = 
"2026-01-03T17:31:36.699Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, + { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, + { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" }, + { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" }, + { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, + { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, + { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, + { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = 
"2026-01-03T17:32:00.989Z" }, + { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, + { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, + { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, + { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, + { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time 
= "2026-01-03T17:32:13.705Z" }, + { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = 
"sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] [[package]] @@ -27,12 +152,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, ] +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "antlr4-python3-runtime" +version = "4.9.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/38/7859ff46355f76f8d19459005ca000b6e7012f2f1ca597746cbcd1fbfe5e/antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b", size = 117034, upload-time = "2021-11-06T17:52:23.524Z" } + [[package]] name = "anyio" version = "4.12.1" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] @@ -41,6 +180,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + [[package]] name = "autoresearch" version = "0.1.0" @@ -48,15 +196,14 @@ source = { virtual = "." 
} dependencies = [ { name = "kernels" }, { name = "matplotlib" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "pandas", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "pandas", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy" }, + { name = "pandas" }, { name = "pyarrow" }, { name = "requests" }, { name = "rustbpe" }, { name = "tiktoken" }, { name = "torch" }, + { name = "ttt-discover" }, ] [package.metadata] @@ -70,6 +217,7 @@ requires-dist = [ { name = "rustbpe", specifier = ">=0.1.0" }, { name = "tiktoken", specifier = ">=0.11.0" }, { name = "torch", specifier = "==2.9.1", index = "https://download.pytorch.org/whl/cu128" }, + { name = "ttt-discover", git = "https://github.com/test-time-training/discover?rev=5df1a0ee9b04272ca33de0101ae64dd499e63f29" }, ] [[package]] @@ -87,22 +235,6 @@ version = "3.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, - { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, - { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, - { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, - { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, - { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, - { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, - { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, - { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, - { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, { url = 
"https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, @@ -170,6 +302,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] +[[package]] +name = "chz" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/6c/09c8ca50c40e18be211f25ad6dcdb81f8110ba2d611cd0375f5fb65fb762/chz-0.4.0.tar.gz", hash = "sha256:5380039e6970a1056c2140288aafa41a33f26d5e4c685117be80f7e260c8d679", size = 82473, upload-time = "2025-11-24T00:55:10.634Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/eb/77789ad6f1807328a61c205881580546af597f60334f1f96fd4f3bb6e929/chz-0.4.0-py3-none-any.whl", hash = "sha256:5db5ffe42f6be38f1c37e1b18f0d5559572ee8a8dc941116e67f1bd5396e2a9b", size = 56277, upload-time = "2025-11-24T00:55:09.381Z" }, +] + [[package]] name = "click" version = "8.3.1" @@ -183,105 +327,29 @@ wheels = [ ] [[package]] -name = "colorama" -version = "0.4.6" +name = "cloudpickle" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, ] [[package]] -name = "contourpy" -version = "1.3.2" +name = "colorama" +version = "0.4.6" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform != 'linux'", -] -dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/54/eb9bfc647b19f2009dd5c7f5ec51c4e6ca831725f1aea7a993034f483147/contourpy-1.3.2.tar.gz", hash = "sha256:b6945942715a034c671b7fc54f9588126b0b8bf23db2696e3ca8328f3ff0ab54", size = 13466130, upload-time = "2025-04-15T17:47:53.79Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/a3/da4153ec8fe25d263aa48c1a4cbde7f49b59af86f0b6f7862788c60da737/contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934", size = 268551, upload-time = "2025-04-15T17:34:46.581Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/6c/330de89ae1087eb622bfca0177d32a7ece50c3ef07b28002de4757d9d875/contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989", size = 253399, upload-time = "2025-04-15T17:34:51.427Z" }, - { url = "https://files.pythonhosted.org/packages/c1/bd/20c6726b1b7f81a8bee5271bed5c165f0a8e1f572578a9d27e2ccb763cb2/contourpy-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9be002b31c558d1ddf1b9b415b162c603405414bacd6932d031c5b5a8b757f0d", size = 312061, upload-time = "2025-04-15T17:34:55.961Z" }, - { url = "https://files.pythonhosted.org/packages/22/fc/a9665c88f8a2473f823cf1ec601de9e5375050f1958cbb356cdf06ef1ab6/contourpy-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d2e74acbcba3bfdb6d9d8384cdc4f9260cae86ed9beee8bd5f54fee49a430b9", size = 351956, upload-time = "2025-04-15T17:35:00.992Z" }, - { url = "https://files.pythonhosted.org/packages/25/eb/9f0a0238f305ad8fb7ef42481020d6e20cf15e46be99a1fcf939546a177e/contourpy-1.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e259bced5549ac64410162adc973c5e2fb77f04df4a439d00b478e57a0e65512", size = 320872, upload-time = "2025-04-15T17:35:06.177Z" }, - { url = "https://files.pythonhosted.org/packages/32/5c/1ee32d1c7956923202f00cf8d2a14a62ed7517bdc0ee1e55301227fc273c/contourpy-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad687a04bc802cbe8b9c399c07162a3c35e227e2daccf1668eb1f278cb698631", size = 325027, upload-time = "2025-04-15T17:35:11.244Z" }, - { url = "https://files.pythonhosted.org/packages/83/bf/9baed89785ba743ef329c2b07fd0611d12bfecbedbdd3eeecf929d8d3b52/contourpy-1.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cdd22595308f53ef2f891040ab2b93d79192513ffccbd7fe19be7aa773a5e09f", size = 1306641, upload-time = "2025-04-15T17:35:26.701Z" }, - { url = 
"https://files.pythonhosted.org/packages/d4/cc/74e5e83d1e35de2d28bd97033426b450bc4fd96e092a1f7a63dc7369b55d/contourpy-1.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b4f54d6a2defe9f257327b0f243612dd051cc43825587520b1bf74a31e2f6ef2", size = 1374075, upload-time = "2025-04-15T17:35:43.204Z" }, - { url = "https://files.pythonhosted.org/packages/0c/42/17f3b798fd5e033b46a16f8d9fcb39f1aba051307f5ebf441bad1ecf78f8/contourpy-1.3.2-cp310-cp310-win32.whl", hash = "sha256:f939a054192ddc596e031e50bb13b657ce318cf13d264f095ce9db7dc6ae81c0", size = 177534, upload-time = "2025-04-15T17:35:46.554Z" }, - { url = "https://files.pythonhosted.org/packages/54/ec/5162b8582f2c994721018d0c9ece9dc6ff769d298a8ac6b6a652c307e7df/contourpy-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c440093bbc8fc21c637c03bafcbef95ccd963bc6e0514ad887932c18ca2a759a", size = 221188, upload-time = "2025-04-15T17:35:50.064Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b9/ede788a0b56fc5b071639d06c33cb893f68b1178938f3425debebe2dab78/contourpy-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a37a2fb93d4df3fc4c0e363ea4d16f83195fc09c891bc8ce072b9d084853445", size = 269636, upload-time = "2025-04-15T17:35:54.473Z" }, - { url = "https://files.pythonhosted.org/packages/e6/75/3469f011d64b8bbfa04f709bfc23e1dd71be54d05b1b083be9f5b22750d1/contourpy-1.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7cd50c38f500bbcc9b6a46643a40e0913673f869315d8e70de0438817cb7773", size = 254636, upload-time = "2025-04-15T17:35:58.283Z" }, - { url = "https://files.pythonhosted.org/packages/8d/2f/95adb8dae08ce0ebca4fd8e7ad653159565d9739128b2d5977806656fcd2/contourpy-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6658ccc7251a4433eebd89ed2672c2ed96fba367fd25ca9512aa92a4b46c4f1", size = 313053, upload-time = "2025-04-15T17:36:03.235Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/a6/8ccf97a50f31adfa36917707fe39c9a0cbc24b3bbb58185577f119736cc9/contourpy-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:70771a461aaeb335df14deb6c97439973d253ae70660ca085eec25241137ef43", size = 352985, upload-time = "2025-04-15T17:36:08.275Z" }, - { url = "https://files.pythonhosted.org/packages/1d/b6/7925ab9b77386143f39d9c3243fdd101621b4532eb126743201160ffa7e6/contourpy-1.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65a887a6e8c4cd0897507d814b14c54a8c2e2aa4ac9f7686292f9769fcf9a6ab", size = 323750, upload-time = "2025-04-15T17:36:13.29Z" }, - { url = "https://files.pythonhosted.org/packages/c2/f3/20c5d1ef4f4748e52d60771b8560cf00b69d5c6368b5c2e9311bcfa2a08b/contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3859783aefa2b8355697f16642695a5b9792e7a46ab86da1118a4a23a51a33d7", size = 326246, upload-time = "2025-04-15T17:36:18.329Z" }, - { url = "https://files.pythonhosted.org/packages/8c/e5/9dae809e7e0b2d9d70c52b3d24cba134dd3dad979eb3e5e71f5df22ed1f5/contourpy-1.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eab0f6db315fa4d70f1d8ab514e527f0366ec021ff853d7ed6a2d33605cf4b83", size = 1308728, upload-time = "2025-04-15T17:36:33.878Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4a/0058ba34aeea35c0b442ae61a4f4d4ca84d6df8f91309bc2d43bb8dd248f/contourpy-1.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d91a3ccc7fea94ca0acab82ceb77f396d50a1f67412efe4c526f5d20264e6ecd", size = 1375762, upload-time = "2025-04-15T17:36:51.295Z" }, - { url = "https://files.pythonhosted.org/packages/09/33/7174bdfc8b7767ef2c08ed81244762d93d5c579336fc0b51ca57b33d1b80/contourpy-1.3.2-cp311-cp311-win32.whl", hash = "sha256:1c48188778d4d2f3d48e4643fb15d8608b1d01e4b4d6b0548d9b336c28fc9b6f", size = 178196, upload-time = "2025-04-15T17:36:55.002Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/fe/4029038b4e1c4485cef18e480b0e2cd2d755448bb071eb9977caac80b77b/contourpy-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:5ebac872ba09cb8f2131c46b8739a7ff71de28a24c869bcad554477eb089a878", size = 222017, upload-time = "2025-04-15T17:36:58.576Z" }, - { url = "https://files.pythonhosted.org/packages/34/f7/44785876384eff370c251d58fd65f6ad7f39adce4a093c934d4a67a7c6b6/contourpy-1.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4caf2bcd2969402bf77edc4cb6034c7dd7c0803213b3523f111eb7460a51b8d2", size = 271580, upload-time = "2025-04-15T17:37:03.105Z" }, - { url = "https://files.pythonhosted.org/packages/93/3b/0004767622a9826ea3d95f0e9d98cd8729015768075d61f9fea8eeca42a8/contourpy-1.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:82199cb78276249796419fe36b7386bd8d2cc3f28b3bc19fe2454fe2e26c4c15", size = 255530, upload-time = "2025-04-15T17:37:07.026Z" }, - { url = "https://files.pythonhosted.org/packages/e7/bb/7bd49e1f4fa805772d9fd130e0d375554ebc771ed7172f48dfcd4ca61549/contourpy-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106fab697af11456fcba3e352ad50effe493a90f893fca6c2ca5c033820cea92", size = 307688, upload-time = "2025-04-15T17:37:11.481Z" }, - { url = "https://files.pythonhosted.org/packages/fc/97/e1d5dbbfa170725ef78357a9a0edc996b09ae4af170927ba8ce977e60a5f/contourpy-1.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d14f12932a8d620e307f715857107b1d1845cc44fdb5da2bc8e850f5ceba9f87", size = 347331, upload-time = "2025-04-15T17:37:18.212Z" }, - { url = "https://files.pythonhosted.org/packages/6f/66/e69e6e904f5ecf6901be3dd16e7e54d41b6ec6ae3405a535286d4418ffb4/contourpy-1.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:532fd26e715560721bb0d5fc7610fce279b3699b018600ab999d1be895b09415", size = 318963, upload-time = "2025-04-15T17:37:22.76Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/32/b8a1c8965e4f72482ff2d1ac2cd670ce0b542f203c8e1d34e7c3e6925da7/contourpy-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b383144cf2d2c29f01a1e8170f50dacf0eac02d64139dcd709a8ac4eb3cfe", size = 323681, upload-time = "2025-04-15T17:37:33.001Z" }, - { url = "https://files.pythonhosted.org/packages/30/c6/12a7e6811d08757c7162a541ca4c5c6a34c0f4e98ef2b338791093518e40/contourpy-1.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c49f73e61f1f774650a55d221803b101d966ca0c5a2d6d5e4320ec3997489441", size = 1308674, upload-time = "2025-04-15T17:37:48.64Z" }, - { url = "https://files.pythonhosted.org/packages/2a/8a/bebe5a3f68b484d3a2b8ffaf84704b3e343ef1addea528132ef148e22b3b/contourpy-1.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3d80b2c0300583228ac98d0a927a1ba6a2ba6b8a742463c564f1d419ee5b211e", size = 1380480, upload-time = "2025-04-15T17:38:06.7Z" }, - { url = "https://files.pythonhosted.org/packages/34/db/fcd325f19b5978fb509a7d55e06d99f5f856294c1991097534360b307cf1/contourpy-1.3.2-cp312-cp312-win32.whl", hash = "sha256:90df94c89a91b7362e1142cbee7568f86514412ab8a2c0d0fca72d7e91b62912", size = 178489, upload-time = "2025-04-15T17:38:10.338Z" }, - { url = "https://files.pythonhosted.org/packages/01/c8/fadd0b92ffa7b5eb5949bf340a63a4a496a6930a6c37a7ba0f12acb076d6/contourpy-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:8c942a01d9163e2e5cfb05cb66110121b8d07ad438a17f9e766317bcb62abf73", size = 223042, upload-time = "2025-04-15T17:38:14.239Z" }, - { url = "https://files.pythonhosted.org/packages/2e/61/5673f7e364b31e4e7ef6f61a4b5121c5f170f941895912f773d95270f3a2/contourpy-1.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:de39db2604ae755316cb5967728f4bea92685884b1e767b7c24e983ef5f771cb", size = 271630, upload-time = "2025-04-15T17:38:19.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/66/a40badddd1223822c95798c55292844b7e871e50f6bfd9f158cb25e0bd39/contourpy-1.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f9e896f447c5c8618f1edb2bafa9a4030f22a575ec418ad70611450720b5b08", size = 255670, upload-time = "2025-04-15T17:38:23.688Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c7/cf9fdee8200805c9bc3b148f49cb9482a4e3ea2719e772602a425c9b09f8/contourpy-1.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71e2bd4a1c4188f5c2b8d274da78faab884b59df20df63c34f74aa1813c4427c", size = 306694, upload-time = "2025-04-15T17:38:28.238Z" }, - { url = "https://files.pythonhosted.org/packages/dd/e7/ccb9bec80e1ba121efbffad7f38021021cda5be87532ec16fd96533bb2e0/contourpy-1.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de425af81b6cea33101ae95ece1f696af39446db9682a0b56daaa48cfc29f38f", size = 345986, upload-time = "2025-04-15T17:38:33.502Z" }, - { url = "https://files.pythonhosted.org/packages/dc/49/ca13bb2da90391fa4219fdb23b078d6065ada886658ac7818e5441448b78/contourpy-1.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:977e98a0e0480d3fe292246417239d2d45435904afd6d7332d8455981c408b85", size = 318060, upload-time = "2025-04-15T17:38:38.672Z" }, - { url = "https://files.pythonhosted.org/packages/c8/65/5245ce8c548a8422236c13ffcdcdada6a2a812c361e9e0c70548bb40b661/contourpy-1.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:434f0adf84911c924519d2b08fc10491dd282b20bdd3fa8f60fd816ea0b48841", size = 322747, upload-time = "2025-04-15T17:38:43.712Z" }, - { url = "https://files.pythonhosted.org/packages/72/30/669b8eb48e0a01c660ead3752a25b44fdb2e5ebc13a55782f639170772f9/contourpy-1.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c66c4906cdbc50e9cba65978823e6e00b45682eb09adbb78c9775b74eb222422", size = 1308895, upload-time = "2025-04-15T17:39:00.224Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/5a/b569f4250decee6e8d54498be7bdf29021a4c256e77fe8138c8319ef8eb3/contourpy-1.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8b7fc0cd78ba2f4695fd0a6ad81a19e7e3ab825c31b577f384aa9d7817dc3bef", size = 1379098, upload-time = "2025-04-15T17:43:29.649Z" }, - { url = "https://files.pythonhosted.org/packages/19/ba/b227c3886d120e60e41b28740ac3617b2f2b971b9f601c835661194579f1/contourpy-1.3.2-cp313-cp313-win32.whl", hash = "sha256:15ce6ab60957ca74cff444fe66d9045c1fd3e92c8936894ebd1f3eef2fff075f", size = 178535, upload-time = "2025-04-15T17:44:44.532Z" }, - { url = "https://files.pythonhosted.org/packages/12/6e/2fed56cd47ca739b43e892707ae9a13790a486a3173be063681ca67d2262/contourpy-1.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e1578f7eafce927b168752ed7e22646dad6cd9bca673c60bff55889fa236ebf9", size = 223096, upload-time = "2025-04-15T17:44:48.194Z" }, - { url = "https://files.pythonhosted.org/packages/54/4c/e76fe2a03014a7c767d79ea35c86a747e9325537a8b7627e0e5b3ba266b4/contourpy-1.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0475b1f6604896bc7c53bb070e355e9321e1bc0d381735421a2d2068ec56531f", size = 285090, upload-time = "2025-04-15T17:43:34.084Z" }, - { url = "https://files.pythonhosted.org/packages/7b/e2/5aba47debd55d668e00baf9651b721e7733975dc9fc27264a62b0dd26eb8/contourpy-1.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c85bb486e9be652314bb5b9e2e3b0d1b2e643d5eec4992c0fbe8ac71775da739", size = 268643, upload-time = "2025-04-15T17:43:38.626Z" }, - { url = "https://files.pythonhosted.org/packages/a1/37/cd45f1f051fe6230f751cc5cdd2728bb3a203f5619510ef11e732109593c/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:745b57db7758f3ffc05a10254edd3182a2a83402a89c00957a8e8a22f5582823", size = 310443, upload-time = "2025-04-15T17:43:44.522Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/a2/36ea6140c306c9ff6dd38e3bcec80b3b018474ef4d17eb68ceecd26675f4/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:970e9173dbd7eba9b4e01aab19215a48ee5dd3f43cef736eebde064a171f89a5", size = 349865, upload-time = "2025-04-15T17:43:49.545Z" }, - { url = "https://files.pythonhosted.org/packages/95/b7/2fc76bc539693180488f7b6cc518da7acbbb9e3b931fd9280504128bf956/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6c4639a9c22230276b7bffb6a850dfc8258a2521305e1faefe804d006b2e532", size = 321162, upload-time = "2025-04-15T17:43:54.203Z" }, - { url = "https://files.pythonhosted.org/packages/f4/10/76d4f778458b0aa83f96e59d65ece72a060bacb20cfbee46cf6cd5ceba41/contourpy-1.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc829960f34ba36aad4302e78eabf3ef16a3a100863f0d4eeddf30e8a485a03b", size = 327355, upload-time = "2025-04-15T17:44:01.025Z" }, - { url = "https://files.pythonhosted.org/packages/43/a3/10cf483ea683f9f8ab096c24bad3cce20e0d1dd9a4baa0e2093c1c962d9d/contourpy-1.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d32530b534e986374fc19eaa77fcb87e8a99e5431499949b828312bdcd20ac52", size = 1307935, upload-time = "2025-04-15T17:44:17.322Z" }, - { url = "https://files.pythonhosted.org/packages/78/73/69dd9a024444489e22d86108e7b913f3528f56cfc312b5c5727a44188471/contourpy-1.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e298e7e70cf4eb179cc1077be1c725b5fd131ebc81181bf0c03525c8abc297fd", size = 1372168, upload-time = "2025-04-15T17:44:33.43Z" }, - { url = "https://files.pythonhosted.org/packages/0f/1b/96d586ccf1b1a9d2004dd519b25fbf104a11589abfd05484ff12199cca21/contourpy-1.3.2-cp313-cp313t-win32.whl", hash = "sha256:d0e589ae0d55204991450bb5c23f571c64fe43adaa53f93fc902a84c96f52fe1", size = 189550, upload-time = "2025-04-15T17:44:37.092Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/e6/6000d0094e8a5e32ad62591c8609e269febb6e4db83a1c75ff8868b42731/contourpy-1.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:78e9253c3de756b3f6a5174d024c4835acd59eb3f8e2ca13e775dbffe1558f69", size = 238214, upload-time = "2025-04-15T17:44:40.827Z" }, - { url = "https://files.pythonhosted.org/packages/33/05/b26e3c6ecc05f349ee0013f0bb850a761016d89cec528a98193a48c34033/contourpy-1.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fd93cc7f3139b6dd7aab2f26a90dde0aa9fc264dbf70f6740d498a70b860b82c", size = 265681, upload-time = "2025-04-15T17:44:59.314Z" }, - { url = "https://files.pythonhosted.org/packages/2b/25/ac07d6ad12affa7d1ffed11b77417d0a6308170f44ff20fa1d5aa6333f03/contourpy-1.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:107ba8a6a7eec58bb475329e6d3b95deba9440667c4d62b9b6063942b61d7f16", size = 315101, upload-time = "2025-04-15T17:45:04.165Z" }, - { url = "https://files.pythonhosted.org/packages/8f/4d/5bb3192bbe9d3f27e3061a6a8e7733c9120e203cb8515767d30973f71030/contourpy-1.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ded1706ed0c1049224531b81128efbd5084598f18d8a2d9efae833edbd2b40ad", size = 220599, upload-time = "2025-04-15T17:45:08.456Z" }, - { url = "https://files.pythonhosted.org/packages/ff/c0/91f1215d0d9f9f343e4773ba6c9b89e8c0cc7a64a6263f21139da639d848/contourpy-1.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f5964cdad279256c084b69c3f412b7801e15356b16efa9d78aa974041903da0", size = 266807, upload-time = "2025-04-15T17:45:15.535Z" }, - { url = "https://files.pythonhosted.org/packages/d4/79/6be7e90c955c0487e7712660d6cead01fa17bff98e0ea275737cc2bc8e71/contourpy-1.3.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b65a95d642d4efa8f64ba12558fcb83407e58a2dfba9d796d77b63ccfcaff5", size = 318729, upload-time = "2025-04-15T17:45:20.166Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/68/7f46fb537958e87427d98a4074bcde4b67a70b04900cfc5ce29bc2f556c1/contourpy-1.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8c5acb8dddb0752bf252e01a3035b21443158910ac16a3b0d20e7fed7d534ce5", size = 221791, upload-time = "2025-04-15T17:45:24.794Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] name = "contourpy" version = "1.3.3" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'emscripten'", - 
"python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } wheels = [ @@ -368,15 +436,46 @@ wheels = [ ] [[package]] -name = "exceptiongroup" -version = "1.3.1" +name = "datasets" +version = "4.6.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, + { name = "dill" }, + { name = "filelock" }, + { name = "fsspec", extra = ["http"] }, + { name = "httpx" }, + { name = "huggingface-hub" }, + { name = "multiprocess" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pandas" }, + { name = "pyarrow" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/94/eb81c6fe32e9b6ef92223141b5a553aeff2e9456968424a8533cbe88f476/datasets-4.6.1.tar.gz", hash = "sha256:140ce500bc41939ff6ce995702d66b1f4b2ee7f117bb9b07512fab6804d4070a", size = 593865, upload-time = "2026-02-27T23:26:49.482Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = 
"sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/37/f0/99fe6eb530c7ee9ee1faee48059eb8a6437f80c893a496b98a78864e0fc6/datasets-4.6.1-py3-none-any.whl", hash = "sha256:f53228e6dadc9f837037b1bf3051d7d8c054abbb3eb29f1f022926e08090e0da", size = 520667, upload-time = "2026-02-27T23:26:46.855Z" }, +] + +[[package]] +name = "dill" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/80/630b4b88364e9a8c8c5797f4602d0f76ef820909ee32f0bacb9f90654042/dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", size = 186976, upload-time = "2025-04-16T00:41:48.867Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/50/3d/9373ad9c56321fdab5b41197068e1d8c25883b3fea29dd361f9b55116869/dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049", size = 119668, upload-time = "2025-04-16T00:41:47.671Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] [[package]] @@ -394,14 +493,6 @@ version = "4.61.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/94/8a28707adb00bed1bf22dac16ccafe60faf2ade353dcb32c3617ee917307/fonttools-4.61.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c7db70d57e5e1089a274cbb2b1fd635c9a24de809a231b154965d415d6c6d24", size = 2854799, upload-time = "2025-12-12T17:29:27.5Z" }, - { url = "https://files.pythonhosted.org/packages/94/93/c2e682faaa5ee92034818d8f8a8145ae73eb83619600495dcf8503fa7771/fonttools-4.61.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5fe9fd43882620017add5eabb781ebfbc6998ee49b35bd7f8f79af1f9f99a958", size = 2403032, upload-time = "2025-12-12T17:29:30.115Z" }, - { url = "https://files.pythonhosted.org/packages/f1/62/1748f7e7e1ee41aa52279fd2e3a6d0733dc42a673b16932bad8e5d0c8b28/fonttools-4.61.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8db08051fc9e7d8bc622f2112511b8107d8f27cd89e2f64ec45e9825e8288da", size = 4897863, upload-time = "2025-12-12T17:29:32.535Z" }, - { url = "https://files.pythonhosted.org/packages/69/69/4ca02ee367d2c98edcaeb83fc278d20972502ee071214ad9d8ca85e06080/fonttools-4.61.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a76d4cb80f41ba94a6691264be76435e5f72f2cb3cab0b092a6212855f71c2f6", size = 4859076, upload-time = "2025-12-12T17:29:34.907Z" }, - { url = "https://files.pythonhosted.org/packages/8c/f5/660f9e3cefa078861a7f099107c6d203b568a6227eef163dd173bfc56bdc/fonttools-4.61.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a13fc8aeb24bad755eea8f7f9d409438eb94e82cf86b08fe77a03fbc8f6a96b1", size = 4875623, upload-time = "2025-12-12T17:29:37.33Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/d1/9d7c5091d2276ed47795c131c1bf9316c3c1ab2789c22e2f59e0572ccd38/fonttools-4.61.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b846a1fcf8beadeb9ea4f44ec5bdde393e2f1569e17d700bfc49cd69bde75881", size = 4993327, upload-time = "2025-12-12T17:29:39.781Z" }, - { url = "https://files.pythonhosted.org/packages/6f/2d/28def73837885ae32260d07660a052b99f0aa00454867d33745dfe49dbf0/fonttools-4.61.1-cp310-cp310-win32.whl", hash = "sha256:78a7d3ab09dc47ac1a363a493e6112d8cabed7ba7caad5f54dbe2f08676d1b47", size = 1502180, upload-time = "2025-12-12T17:29:42.217Z" }, - { url = "https://files.pythonhosted.org/packages/63/fa/bfdc98abb4dd2bd491033e85e3ba69a2313c850e759a6daa014bc9433b0f/fonttools-4.61.1-cp310-cp310-win_amd64.whl", hash = "sha256:eff1ac3cc66c2ac7cda1e64b4e2f3ffef474b7335f92fc3833fc632d595fcee6", size = 1550654, upload-time = "2025-12-12T17:29:44.564Z" }, { url = "https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" }, { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" }, { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" }, @@ -445,6 +536,111 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, ] +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = 
"2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = 
"2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = 
"2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = 
"sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = 
"2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = 
"2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = 
"2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + [[package]] name = "fsspec" version = "2026.2.0" @@ -454,6 +650,86 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e6/ab/fb21f4c939bb440104cc2b396d3be1d9b7a9fd3c6c2a53d98c45b3d7c954/fsspec-2026.2.0-py3-none-any.whl", hash = "sha256:98de475b5cb3bd66bedd5c4679e87b4fdfe1a3bf4d707b151b3c07e58c9a2437", size = 202505, upload-time = "2026-02-05T21:50:51.819Z" }, ] +[package.optional-dependencies] +http = [ + { name = "aiohttp" }, +] + +[[package]] +name = "gitdb" +version = "4.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = 
"sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, +] + +[[package]] +name = "gitpython" +version = "3.1.46" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371, upload-time = "2026-01-01T15:37:32.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620, upload-time = "2026-01-01T15:37:30.574Z" }, +] + +[[package]] +name = "grpcio" +version = "1.78.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = "https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = "https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -463,6 +739,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] +[[package]] +name = "h2" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, +] + [[package]] name = "hf-xet" version = "1.3.1" @@ -495,6 +784,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4e/46/1ba8d36f8290a4b98f78898bdce2b0e8fe6d9a59df34a1399eb61a8d877f/hf_xet-1.3.1-cp37-abi3-win_arm64.whl", hash = 
"sha256:851b1be6597a87036fe7258ce7578d5df3c08176283b989c3b165f94125c5097", size = 3500490, upload-time = "2026-02-25T00:58:00.667Z" }, ] +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + [[package]] name = "httpcore" version = "1.0.9" @@ -523,6 +821,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + [[package]] name = "huggingface-hub" version = "1.5.0" @@ -543,6 +846,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/74/2bc951622e2dbba1af9a460d93c51d15e458becd486e62c29cc0ccb08178/huggingface_hub-1.5.0-py3-none-any.whl", hash = "sha256:c9c0b3ab95a777fc91666111f3b3ede71c0cdced3614c553a64e98920585c4ee", size = 596261, upload-time = "2026-02-26T15:35:31.1Z" }, ] +[[package]] +name = "hydra-core" +version = "1.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "antlr4-python3-runtime" }, + { name = "omegaconf" }, + { name = "packaging" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/6d/8e/07e42bc434a847154083b315779b0a81d567154504624e181caf2c71cd98/hydra-core-1.3.2.tar.gz", hash = "sha256:8a878ed67216997c3e9d88a8e72e7b4767e81af37afb4ea3334b269a4390a824", size = 3263494, upload-time = "2023-02-23T18:33:43.03Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/50/e0edd38dcd63fb26a8547f13d28f7a008bc4a3fd4eb4ff030673f22ad41a/hydra_core-1.3.2-py3-none-any.whl", hash = "sha256:fa0238a9e31df3373b35b0bfb672c34cc92718d21f81311d8996a16de1141d8b", size = 154547, upload-time = "2023-02-23T18:33:40.801Z" }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -564,6 +890,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jsonschema" +version = "4.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + [[package]] name = "kernels" version = "0.12.1" @@ -572,7 +925,6 @@ dependencies = [ { name = "huggingface-hub" }, { name = "packaging" }, { name = "pyyaml" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5d/c5/d9bd5f3332b288bf3e771a42b7c3c9d2af8daa58c682a84c4e0c83058d62/kernels-0.12.1.tar.gz", hash = "sha256:41e31ef167add0062cdc302a943ce287194c13b5af12b82a5fa76e7353ca0042", size = 56712, upload-time = "2026-01-26T16:15:48.319Z" } wheels = [ @@ -585,19 +937,6 @@ version = "1.4.9" source = { registry = 
"https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/5d/8ce64e36d4e3aac5ca96996457dcf33e34e6051492399a3f1fec5657f30b/kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b", size = 124159, upload-time = "2025-08-10T21:25:35.472Z" }, - { url = "https://files.pythonhosted.org/packages/96/1e/22f63ec454874378175a5f435d6ea1363dd33fb2af832c6643e4ccea0dc8/kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f", size = 66578, upload-time = "2025-08-10T21:25:36.73Z" }, - { url = "https://files.pythonhosted.org/packages/41/4c/1925dcfff47a02d465121967b95151c82d11027d5ec5242771e580e731bd/kiwisolver-1.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84fd60810829c27ae375114cd379da1fa65e6918e1da405f356a775d49a62bcf", size = 65312, upload-time = "2025-08-10T21:25:37.658Z" }, - { url = "https://files.pythonhosted.org/packages/d4/42/0f333164e6307a0687d1eb9ad256215aae2f4bd5d28f4653d6cd319a3ba3/kiwisolver-1.4.9-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78efa4c6e804ecdf727e580dbb9cba85624d2e1c6b5cb059c66290063bd99a9", size = 1628458, upload-time = "2025-08-10T21:25:39.067Z" }, - { url = "https://files.pythonhosted.org/packages/86/b6/2dccb977d651943995a90bfe3495c2ab2ba5cd77093d9f2318a20c9a6f59/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4efec7bcf21671db6a3294ff301d2fc861c31faa3c8740d1a94689234d1b415", size = 1225640, upload-time = "2025-08-10T21:25:40.489Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/2b/362ebd3eec46c850ccf2bfe3e30f2fc4c008750011f38a850f088c56a1c6/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90f47e70293fc3688b71271100a1a5453aa9944a81d27ff779c108372cf5567b", size = 1244074, upload-time = "2025-08-10T21:25:42.221Z" }, - { url = "https://files.pythonhosted.org/packages/6f/bb/f09a1e66dab8984773d13184a10a29fe67125337649d26bdef547024ed6b/kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fdca1def57a2e88ef339de1737a1449d6dbf5fab184c54a1fca01d541317154", size = 1293036, upload-time = "2025-08-10T21:25:43.801Z" }, - { url = "https://files.pythonhosted.org/packages/ea/01/11ecf892f201cafda0f68fa59212edaea93e96c37884b747c181303fccd1/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cf554f21be770f5111a1690d42313e140355e687e05cf82cb23d0a721a64a48", size = 2175310, upload-time = "2025-08-10T21:25:45.045Z" }, - { url = "https://files.pythonhosted.org/packages/7f/5f/bfe11d5b934f500cc004314819ea92427e6e5462706a498c1d4fc052e08f/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1795ac5cd0510207482c3d1d3ed781143383b8cfd36f5c645f3897ce066220", size = 2270943, upload-time = "2025-08-10T21:25:46.393Z" }, - { url = "https://files.pythonhosted.org/packages/3d/de/259f786bf71f1e03e73d87e2db1a9a3bcab64d7b4fd780167123161630ad/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ccd09f20ccdbbd341b21a67ab50a119b64a403b09288c27481575105283c1586", size = 2440488, upload-time = "2025-08-10T21:25:48.074Z" }, - { url = "https://files.pythonhosted.org/packages/1b/76/c989c278faf037c4d3421ec07a5c452cd3e09545d6dae7f87c15f54e4edf/kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:540c7c72324d864406a009d72f5d6856f49693db95d1fbb46cf86febef873634", size = 2246787, upload-time = "2025-08-10T21:25:49.442Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/55/c2898d84ca440852e560ca9f2a0d28e6e931ac0849b896d77231929900e7/kiwisolver-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:ede8c6d533bc6601a47ad4046080d36b8fc99f81e6f1c17b0ac3c2dc91ac7611", size = 73730, upload-time = "2025-08-10T21:25:51.102Z" }, - { url = "https://files.pythonhosted.org/packages/e8/09/486d6ac523dd33b80b368247f238125d027964cfacb45c654841e88fb2ae/kiwisolver-1.4.9-cp310-cp310-win_arm64.whl", hash = "sha256:7b4da0d01ac866a57dd61ac258c5607b4cd677f63abaec7b148354d2b2cdd536", size = 65036, upload-time = "2025-08-10T21:25:52.063Z" }, { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" }, { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, @@ -675,11 +1014,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, { url = 
"https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, - { url = "https://files.pythonhosted.org/packages/a2/63/fde392691690f55b38d5dd7b3710f5353bf7a8e52de93a22968801ab8978/kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d1d9e582ad4d63062d34077a9a1e9f3c34088a2ec5135b1f7190c07cf366527", size = 60183, upload-time = "2025-08-10T21:27:37.669Z" }, - { url = "https://files.pythonhosted.org/packages/27/b1/6aad34edfdb7cced27f371866f211332bba215bfd918ad3322a58f480d8b/kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:deed0c7258ceb4c44ad5ec7d9918f9f14fd05b2be86378d86cf50e63d1e7b771", size = 58675, upload-time = "2025-08-10T21:27:39.031Z" }, - { url = "https://files.pythonhosted.org/packages/9d/1a/23d855a702bb35a76faed5ae2ba3de57d323f48b1f6b17ee2176c4849463/kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a590506f303f512dff6b7f75fd2fd18e16943efee932008fe7140e5fa91d80e", size = 80277, upload-time = "2025-08-10T21:27:40.129Z" }, - { url = "https://files.pythonhosted.org/packages/5a/5b/5239e3c2b8fb5afa1e8508f721bb77325f740ab6994d963e61b2b7abcc1e/kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e09c2279a4d01f099f52d5c4b3d9e208e91edcbd1a175c9662a8b16e000fece9", size = 77994, upload-time = "2025-08-10T21:27:41.181Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/1c/5d4d468fb16f8410e596ed0eac02d2c68752aa7dc92997fe9d60a7147665/kiwisolver-1.4.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c9e7cdf45d594ee04d5be1b24dd9d49f3d1590959b2271fb30b5ca2b262c00fb", size = 73744, upload-time = "2025-08-10T21:27:42.254Z" }, { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, @@ -705,17 +1039,6 @@ version = "3.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, - { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, - { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, - { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, - { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, { url = 
"https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, @@ -789,13 +1112,11 @@ name = "matplotlib" version = "3.10.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "contourpy", version = "1.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "contourpy", version = "1.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "contourpy" }, { name = "cycler" }, { name = "fonttools" }, { name = "kiwisolver" }, - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "numpy" }, { name = "packaging" }, { name = "pillow" }, { name = "pyparsing" }, @@ -803,12 +1124,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/be/a30bd917018ad220c400169fba298f2bb7003c8ccbc0c3e24ae2aacad1e8/matplotlib-3.10.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:00270d217d6b20d14b584c521f810d60c5c78406dc289859776550df837dcda7", size = 8239828, upload-time = "2025-12-10T22:55:02.313Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/27/ca01e043c4841078e82cf6e80a6993dfecd315c3d79f5f3153afbb8e1ec6/matplotlib-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37b3c1cc42aa184b3f738cfa18c1c1d72fd496d85467a6cf7b807936d39aa656", size = 8128050, upload-time = "2025-12-10T22:55:04.997Z" }, - { url = "https://files.pythonhosted.org/packages/cb/aa/7ab67f2b729ae6a91bcf9dcac0affb95fb8c56f7fd2b2af894ae0b0cf6fa/matplotlib-3.10.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ee40c27c795bda6a5292e9cff9890189d32f7e3a0bf04e0e3c9430c4a00c37df", size = 8700452, upload-time = "2025-12-10T22:55:07.47Z" }, - { url = "https://files.pythonhosted.org/packages/73/ae/2d5817b0acee3c49b7e7ccfbf5b273f284957cc8e270adf36375db353190/matplotlib-3.10.8-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a48f2b74020919552ea25d222d5cc6af9ca3f4eb43a93e14d068457f545c2a17", size = 9534928, upload-time = "2025-12-10T22:55:10.566Z" }, - { url = "https://files.pythonhosted.org/packages/c9/5b/8e66653e9f7c39cb2e5cab25fce4810daffa2bff02cbf5f3077cea9e942c/matplotlib-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f254d118d14a7f99d616271d6c3c27922c092dac11112670b157798b89bf4933", size = 9586377, upload-time = "2025-12-10T22:55:12.362Z" }, - { url = "https://files.pythonhosted.org/packages/e2/e2/fd0bbadf837f81edb0d208ba8f8cb552874c3b16e27cb91a31977d90875d/matplotlib-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9b587c9c7274c1613a30afabf65a272114cd6cdbe67b3406f818c79d7ab2e2a", size = 8128127, upload-time = "2025-12-10T22:55:14.436Z" }, { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, { url = 
"https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, @@ -851,9 +1166,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, - { url = "https://files.pythonhosted.org/packages/f5/43/31d59500bb950b0d188e149a2e552040528c13d6e3d6e84d0cccac593dcd/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f97aeb209c3d2511443f8797e3e5a569aebb040d4f8bc79aa3ee78a8fb9e3dd8", size = 8237252, upload-time = "2025-12-10T22:56:39.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/2c/615c09984f3c5f907f51c886538ad785cf72e0e11a3225de2c0f9442aecc/matplotlib-3.10.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fb061f596dad3a0f52b60dc6a5dec4a0c300dec41e058a7efe09256188d170b7", size = 8124693, upload-time = "2025-12-10T22:56:41.758Z" }, - { url = "https://files.pythonhosted.org/packages/91/e1/2757277a1c56041e1fc104b51a0f7b9a4afc8eb737865d63cababe30bc61/matplotlib-3.10.8-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12d90df9183093fcd479f4172ac26b322b1248b15729cb57f42f71f24c7e37a3", size = 8702205, upload-time = "2025-12-10T22:56:43.415Z" }, { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, @@ -878,125 +1190,208 @@ wheels = [ ] [[package]] -name = "networkx" -version = "3.4.2" +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = 
"sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" }, + { url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = 
"2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = "2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, + { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "multiprocess" +version = "0.70.18" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform != 'linux'", +dependencies = [ + { name = "dill" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/fd/2ae3826f5be24c6ed87266bc4e59c46ea5b059a103f3d7e7eb76a52aeecb/multiprocess-0.70.18.tar.gz", hash = "sha256:f9597128e6b3e67b23956da07cf3d2e5cba79e2f4e0fba8d7903636663ec6d0d", size = 1798503, upload-time = "2025-04-17T03:11:27.742Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263, upload-time = "2024-10-21T12:39:36.247Z" }, + { url = "https://files.pythonhosted.org/packages/55/4d/9af0d1279c84618bcd35bf5fd7e371657358c7b0a523e54a9cffb87461f8/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b8940ae30139e04b076da6c5b83e9398585ebdf0f2ad3250673fef5b2ff06d6", size = 144695, upload-time = "2025-04-17T03:11:09.161Z" }, + { url = "https://files.pythonhosted.org/packages/17/bf/87323e79dd0562474fad3373c21c66bc6c3c9963b68eb2a209deb4c8575e/multiprocess-0.70.18-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0929ba95831adb938edbd5fb801ac45e705ecad9d100b3e653946b7716cb6bd3", size = 144742, upload-time = "2025-04-17T03:11:10.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/74/cb8c831e58dc6d5cf450b17c7db87f14294a1df52eb391da948b5e0a0b94/multiprocess-0.70.18-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d77f8e4bfe6c6e2e661925bbf9aed4d5ade9a1c6502d5dfc10129b9d1141797", size = 144745, upload-time = "2025-04-17T03:11:11.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/d8/0cba6cf51a1a31f20471fbc823a716170c73012ddc4fb85d706630ed6e8f/multiprocess-0.70.18-py310-none-any.whl", hash = "sha256:60c194974c31784019c1f459d984e8f33ee48f10fcf42c309ba97b30d9bd53ea", size = 134948, upload-time = "2025-04-17T03:11:20.223Z" }, + { url = "https://files.pythonhosted.org/packages/4b/88/9039f2fed1012ef584751d4ceff9ab4a51e5ae264898f0b7cbf44340a859/multiprocess-0.70.18-py311-none-any.whl", hash = "sha256:5aa6eef98e691281b3ad923be2832bf1c55dd2c859acd73e5ec53a66aae06a1d", size = 144462, upload-time = "2025-04-17T03:11:21.657Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b6/5f922792be93b82ec6b5f270bbb1ef031fd0622847070bbcf9da816502cc/multiprocess-0.70.18-py312-none-any.whl", hash = "sha256:9b78f8e5024b573730bfb654783a13800c2c0f2dfc0c25e70b40d184d64adaa2", size = 150287, upload-time = "2025-04-17T03:11:22.69Z" }, + { url = "https://files.pythonhosted.org/packages/ee/25/7d7e78e750bc1aecfaf0efbf826c69a791d2eeaf29cf20cba93ff4cced78/multiprocess-0.70.18-py313-none-any.whl", hash = "sha256:871743755f43ef57d7910a38433cfe41319e72be1bbd90b79c7a5ac523eb9334", size = 151917, upload-time = "2025-04-17T03:11:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c3/ca84c19bd14cdfc21c388fdcebf08b86a7a470ebc9f5c3c084fc2dbc50f7/multiprocess-0.70.18-py38-none-any.whl", hash = "sha256:dbf705e52a154fe5e90fb17b38f02556169557c2dd8bb084f2e06c2784d8279b", size = 132636, upload-time = "2025-04-17T03:11:24.936Z" }, + { url = "https://files.pythonhosted.org/packages/6c/28/dd72947e59a6a8c856448a5e74da6201cb5502ddff644fbc790e4bd40b9a/multiprocess-0.70.18-py39-none-any.whl", hash = "sha256:e78ca805a72b1b810c690b6b4cc32579eba34f403094bbbae962b7b5bf9dfcb8", size = 133478, upload-time = "2025-04-17T03:11:26.253Z" }, ] [[package]] name = "networkx" version = "3.6.1" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - 
"python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'emscripten'", - "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, ] -[[package]] -name = "numpy" -version = "2.2.6" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform != 'linux'", -] -sdist = { url = 
"https://files.pythonhosted.org/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd", size = 20276440, upload-time = "2025-05-17T22:38:04.611Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb", size = 21165245, upload-time = "2025-05-17T21:27:58.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90", size = 14360048, upload-time = "2025-05-17T21:28:21.406Z" }, - { url = "https://files.pythonhosted.org/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163", size = 5340542, upload-time = "2025-05-17T21:28:30.931Z" }, - { url = "https://files.pythonhosted.org/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf", size = 6878301, upload-time = "2025-05-17T21:28:41.613Z" }, - { url = "https://files.pythonhosted.org/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83", size = 14297320, upload-time = "2025-05-17T21:29:02.78Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915", size = 16801050, upload-time = "2025-05-17T21:29:27.675Z" }, - { url = "https://files.pythonhosted.org/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680", size = 15807034, upload-time = "2025-05-17T21:29:51.102Z" }, - { url = "https://files.pythonhosted.org/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289", size = 18614185, upload-time = "2025-05-17T21:30:18.703Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d", size = 6527149, upload-time = "2025-05-17T21:30:29.788Z" }, - { url = "https://files.pythonhosted.org/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3", size = 12904620, upload-time = "2025-05-17T21:30:48.994Z" }, - { url = "https://files.pythonhosted.org/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae", size = 21176963, upload-time = "2025-05-17T21:31:19.36Z" }, - { url = 
"https://files.pythonhosted.org/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a", size = 14406743, upload-time = "2025-05-17T21:31:41.087Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42", size = 5352616, upload-time = "2025-05-17T21:31:50.072Z" }, - { url = "https://files.pythonhosted.org/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491", size = 6889579, upload-time = "2025-05-17T21:32:01.712Z" }, - { url = "https://files.pythonhosted.org/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a", size = 14312005, upload-time = "2025-05-17T21:32:23.332Z" }, - { url = "https://files.pythonhosted.org/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf", size = 16821570, upload-time = "2025-05-17T21:32:47.991Z" }, - { url = "https://files.pythonhosted.org/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1", size = 15818548, upload-time = "2025-05-17T21:33:11.728Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab", size = 18620521, upload-time = "2025-05-17T21:33:39.139Z" }, - { url = "https://files.pythonhosted.org/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47", size = 6525866, upload-time = "2025-05-17T21:33:50.273Z" }, - { url = "https://files.pythonhosted.org/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303", size = 12907455, upload-time = "2025-05-17T21:34:09.135Z" }, - { url = "https://files.pythonhosted.org/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff", size = 20875348, upload-time = "2025-05-17T21:34:39.648Z" }, - { url = "https://files.pythonhosted.org/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c", size = 14119362, upload-time = "2025-05-17T21:35:01.241Z" }, - { url = "https://files.pythonhosted.org/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3", size = 5084103, upload-time = "2025-05-17T21:35:10.622Z" }, - { url = "https://files.pythonhosted.org/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash 
= "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282", size = 6625382, upload-time = "2025-05-17T21:35:21.414Z" }, - { url = "https://files.pythonhosted.org/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87", size = 14018462, upload-time = "2025-05-17T21:35:42.174Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249", size = 16527618, upload-time = "2025-05-17T21:36:06.711Z" }, - { url = "https://files.pythonhosted.org/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49", size = 15505511, upload-time = "2025-05-17T21:36:29.965Z" }, - { url = "https://files.pythonhosted.org/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de", size = 18313783, upload-time = "2025-05-17T21:36:56.883Z" }, - { url = "https://files.pythonhosted.org/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4", size = 6246506, upload-time = "2025-05-17T21:37:07.368Z" }, - { url = "https://files.pythonhosted.org/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2", size = 12614190, upload-time = 
"2025-05-17T21:37:26.213Z" }, - { url = "https://files.pythonhosted.org/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84", size = 20867828, upload-time = "2025-05-17T21:37:56.699Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b", size = 14143006, upload-time = "2025-05-17T21:38:18.291Z" }, - { url = "https://files.pythonhosted.org/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d", size = 5076765, upload-time = "2025-05-17T21:38:27.319Z" }, - { url = "https://files.pythonhosted.org/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566", size = 6617736, upload-time = "2025-05-17T21:38:38.141Z" }, - { url = "https://files.pythonhosted.org/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f", size = 14010719, upload-time = "2025-05-17T21:38:58.433Z" }, - { url = "https://files.pythonhosted.org/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f", size = 16526072, upload-time = "2025-05-17T21:39:22.638Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868", size = 15503213, upload-time = "2025-05-17T21:39:45.865Z" }, - { url = "https://files.pythonhosted.org/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d", size = 18316632, upload-time = "2025-05-17T21:40:13.331Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd", size = 6244532, upload-time = "2025-05-17T21:43:46.099Z" }, - { url = "https://files.pythonhosted.org/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c", size = 12610885, upload-time = "2025-05-17T21:44:05.145Z" }, - { url = "https://files.pythonhosted.org/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6", size = 20963467, upload-time = "2025-05-17T21:40:44Z" }, - { url = "https://files.pythonhosted.org/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda", size = 14225144, upload-time = "2025-05-17T21:41:05.695Z" }, - { url = "https://files.pythonhosted.org/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", 
hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40", size = 5200217, upload-time = "2025-05-17T21:41:15.903Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8", size = 6712014, upload-time = "2025-05-17T21:41:27.321Z" }, - { url = "https://files.pythonhosted.org/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f", size = 14077935, upload-time = "2025-05-17T21:41:49.738Z" }, - { url = "https://files.pythonhosted.org/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa", size = 16600122, upload-time = "2025-05-17T21:42:14.046Z" }, - { url = "https://files.pythonhosted.org/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571", size = 15586143, upload-time = "2025-05-17T21:42:37.464Z" }, - { url = "https://files.pythonhosted.org/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1", size = 18385260, upload-time = "2025-05-17T21:43:05.189Z" }, - { url = "https://files.pythonhosted.org/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff", size = 
6377225, upload-time = "2025-05-17T21:43:16.254Z" }, - { url = "https://files.pythonhosted.org/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06", size = 12771374, upload-time = "2025-05-17T21:43:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d", size = 21040391, upload-time = "2025-05-17T21:44:35.948Z" }, - { url = "https://files.pythonhosted.org/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db", size = 6786754, upload-time = "2025-05-17T21:44:47.446Z" }, - { url = "https://files.pythonhosted.org/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543", size = 16643476, upload-time = "2025-05-17T21:45:11.871Z" }, - { url = "https://files.pythonhosted.org/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00", size = 12812666, upload-time = "2025-05-17T21:45:31.426Z" }, -] - [[package]] name = "numpy" version = "2.4.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - 
"python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'emscripten'", - "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] sdist = { url = "https://files.pythonhosted.org/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae", size = 20723651, upload-time = "2026-01-31T23:13:10.135Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d3/44/71852273146957899753e69986246d6a176061ea183407e95418c2aa4d9a/numpy-2.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7e88598032542bd49af7c4747541422884219056c268823ef6e5e89851c8825", size = 16955478, upload-time = "2026-01-31T23:10:25.623Z" }, @@ -1113,7 +1508,7 @@ name = "nvidia-cudnn-cu12" version = "9.10.2.21" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cublas-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/fa/41/e79269ce215c857c935fd86bcfe91a451a584dfc27f1e068f568b9ad1ab7/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8", 
size = 705026878, upload-time = "2025-06-06T21:52:51.348Z" }, @@ -1125,7 +1520,7 @@ name = "nvidia-cufft-cu12" version = "11.3.3.83" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211, upload-time = "2025-03-07T01:44:56.873Z" }, @@ -1155,9 +1550,9 @@ name = "nvidia-cusolver-cu12" version = "11.7.3.90" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'linux'" }, - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cublas-cu12" }, + { name = "nvidia-cusparse-cu12" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841, upload-time = "2025-03-07T01:46:54.356Z" }, @@ -1169,7 +1564,7 @@ name = "nvidia-cusparse-cu12" version = "12.5.8.93" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129, upload-time = "2025-03-07T01:47:40.407Z" }, @@ -1222,101 +1617,35 @@ wheels = [ ] [[package]] -name = "packaging" -version = "26.0" +name = "omegaconf" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +dependencies = [ + { name = "antlr4-python3-runtime" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/48/6388f1bb9da707110532cb70ec4d2822858ddfb44f1cdf1233c20a80ea4b/omegaconf-2.3.0.tar.gz", hash = "sha256:d5d4b6d29955cc50ad50c46dc269bcd92c6e00f5f90d23ab5fee7bfca4ba4cc7", size = 3298120, upload-time = "2022-12-08T20:59:22.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/e3/94/1843518e420fa3ed6919835845df698c7e27e183cb997394e4a670973a65/omegaconf-2.3.0-py3-none-any.whl", hash = "sha256:7b4df175cdb08ba400f45cae3bdcae7ba8365db4d165fc65fd04b050ab63b46b", size = 79500, upload-time = "2022-12-08T20:59:19.686Z" }, ] [[package]] -name = "pandas" -version = "2.3.3" +name = "packaging" +version = "26.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.11' and sys_platform == 'linux'", - "python_full_version < '3.11' and sys_platform != 'linux'", -] -dependencies = [ - { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version 
< '3.11'" }, - { name = "python-dateutil", marker = "python_full_version < '3.11'" }, - { name = "pytz", marker = "python_full_version < '3.11'" }, - { name = "tzdata", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/f7/f425a00df4fcc22b292c6895c6831c0c8ae1d9fac1e024d16f98a9ce8749/pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c", size = 11555763, upload-time = "2025-09-29T23:16:53.287Z" }, - { url = "https://files.pythonhosted.org/packages/13/4f/66d99628ff8ce7857aca52fed8f0066ce209f96be2fede6cef9f84e8d04f/pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a", size = 10801217, upload-time = "2025-09-29T23:17:04.522Z" }, - { url = "https://files.pythonhosted.org/packages/1d/03/3fc4a529a7710f890a239cc496fc6d50ad4a0995657dccc1d64695adb9f4/pandas-2.3.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5caf26f64126b6c7aec964f74266f435afef1c1b13da3b0636c7518a1fa3e2b1", size = 12148791, upload-time = "2025-09-29T23:17:18.444Z" }, - { url = "https://files.pythonhosted.org/packages/40/a8/4dac1f8f8235e5d25b9955d02ff6f29396191d4e665d71122c3722ca83c5/pandas-2.3.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd7478f1463441ae4ca7308a70e90b33470fa593429f9d4c578dd00d1fa78838", size = 12769373, upload-time = "2025-09-29T23:17:35.846Z" }, - { url = "https://files.pythonhosted.org/packages/df/91/82cc5169b6b25440a7fc0ef3a694582418d875c8e3ebf796a6d6470aa578/pandas-2.3.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:4793891684806ae50d1288c9bae9330293ab4e083ccd1c5e383c34549c6e4250", size = 13200444, upload-time = "2025-09-29T23:17:49.341Z" }, - { url = "https://files.pythonhosted.org/packages/10/ae/89b3283800ab58f7af2952704078555fa60c807fff764395bb57ea0b0dbd/pandas-2.3.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28083c648d9a99a5dd035ec125d42439c6c1c525098c58af0fc38dd1a7a1b3d4", size = 13858459, upload-time = "2025-09-29T23:18:03.722Z" }, - { url = "https://files.pythonhosted.org/packages/85/72/530900610650f54a35a19476eca5104f38555afccda1aa11a92ee14cb21d/pandas-2.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:503cf027cf9940d2ceaa1a93cfb5f8c8c7e6e90720a2850378f0b3f3b1e06826", size = 11346086, upload-time = "2025-09-29T23:18:18.505Z" }, - { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, - { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, - { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, - { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 
12789281, upload-time = "2025-09-29T23:18:56.834Z" }, - { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, - { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, - { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, - { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, - { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, - { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, - { url = 
"https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, - { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, - { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, - { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, - { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, - { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, - { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, - { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, - { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, - { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, - { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, - { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, - { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, - { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" }, - { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" }, - { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" }, - { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" }, - { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" }, - { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" }, - { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" }, - { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" }, - { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" }, - { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] name = "pandas" version = "3.0.1" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'linux'", - "python_full_version == '3.11.*' and sys_platform == 'win32'", - "python_full_version == '3.11.*' and sys_platform == 'emscripten'", - "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "python-dateutil", marker = 
"python_full_version >= '3.11'" }, - { name = "tzdata", marker = "(python_full_version >= '3.11' and sys_platform == 'emscripten') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } wheels = [ @@ -1375,17 +1704,6 @@ version = "12.1.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/30/5bd3d794762481f8c8ae9c80e7b76ecea73b916959eb587521358ef0b2f9/pillow-12.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f1625b72740fdda5d77b4def688eb8fd6490975d06b909fd19f13f391e077e0", size = 5304099, upload-time = "2026-02-11T04:20:06.13Z" }, - { url = "https://files.pythonhosted.org/packages/bd/c1/aab9e8f3eeb4490180e357955e15c2ef74b31f64790ff356c06fb6cf6d84/pillow-12.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:178aa072084bd88ec759052feca8e56cbb14a60b39322b99a049e58090479713", size = 4657880, upload-time = "2026-02-11T04:20:09.291Z" }, - { url = "https://files.pythonhosted.org/packages/f1/0a/9879e30d56815ad529d3985aeff5af4964202425c27261a6ada10f7cbf53/pillow-12.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b66e95d05ba806247aaa1561f080abc7975daf715c30780ff92a20e4ec546e1b", size = 6222587, upload-time = "2026-02-11T04:20:10.82Z" }, - { url 
= "https://files.pythonhosted.org/packages/5a/5f/a1b72ff7139e4f89014e8d451442c74a774d5c43cd938fb0a9f878576b37/pillow-12.1.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89c7e895002bbe49cdc5426150377cbbc04767d7547ed145473f496dfa40408b", size = 8027678, upload-time = "2026-02-11T04:20:12.455Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c2/c7cb187dac79a3d22c3ebeae727abee01e077c8c7d930791dc592f335153/pillow-12.1.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a5cbdcddad0af3da87cb16b60d23648bc3b51967eb07223e9fed77a82b457c4", size = 6335777, upload-time = "2026-02-11T04:20:14.441Z" }, - { url = "https://files.pythonhosted.org/packages/0c/7b/f9b09a7804ec7336effb96c26d37c29d27225783dc1501b7d62dcef6ae25/pillow-12.1.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f51079765661884a486727f0729d29054242f74b46186026582b4e4769918e4", size = 7027140, upload-time = "2026-02-11T04:20:16.387Z" }, - { url = "https://files.pythonhosted.org/packages/98/b2/2fa3c391550bd421b10849d1a2144c44abcd966daadd2f7c12e19ea988c4/pillow-12.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:99c1506ea77c11531d75e3a412832a13a71c7ebc8192ab9e4b2e355555920e3e", size = 6449855, upload-time = "2026-02-11T04:20:18.554Z" }, - { url = "https://files.pythonhosted.org/packages/96/ff/9caf4b5b950c669263c39e96c78c0d74a342c71c4f43fd031bb5cb7ceac9/pillow-12.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36341d06738a9f66c8287cf8b876d24b18db9bd8740fa0672c74e259ad408cff", size = 7151329, upload-time = "2026-02-11T04:20:20.646Z" }, - { url = "https://files.pythonhosted.org/packages/7b/f8/4b24841f582704da675ca535935bccb32b00a6da1226820845fac4a71136/pillow-12.1.1-cp310-cp310-win32.whl", hash = "sha256:6c52f062424c523d6c4db85518774cc3d50f5539dd6eed32b8f6229b26f24d40", size = 6325574, upload-time = "2026-02-11T04:20:22.43Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/f9/9f6b01c0881d7036063aa6612ef04c0e2cad96be21325a1e92d0203f8e91/pillow-12.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6008de247150668a705a6338156efb92334113421ceecf7438a12c9a12dab23", size = 7032347, upload-time = "2026-02-11T04:20:23.932Z" }, - { url = "https://files.pythonhosted.org/packages/79/13/c7922edded3dcdaf10c59297540b72785620abc0538872c819915746757d/pillow-12.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:1a9b0ee305220b392e1124a764ee4265bd063e54a751a6b62eff69992f457fa9", size = 2453457, upload-time = "2026-02-11T04:20:25.392Z" }, { url = "https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" }, { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" }, { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" }, @@ -1467,19 +1785,135 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, ] +[[package]] +name = "platformdirs" +version = "4.9.4" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size 
= 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = 
"2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size 
= 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = 
"2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size 
= 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = 
"2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + [[package]] name = "pyarrow" version = "23.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/a8/24e5dc6855f50a62936ceb004e6e9645e4219a8065f304145d7fb8a79d5d/pyarrow-23.0.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3fab8f82571844eb3c460f90a75583801d14ca0cc32b1acc8c361650e006fd56", size = 34307390, upload-time = "2026-02-16T10:08:08.654Z" }, - { url = "https://files.pythonhosted.org/packages/bc/8e/4be5617b4aaae0287f621ad31c6036e5f63118cfca0dc57d42121ff49b51/pyarrow-23.0.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:3f91c038b95f71ddfc865f11d5876c42f343b4495535bd262c7b321b0b94507c", size = 35853761, upload-time = "2026-02-16T10:08:17.811Z" }, - { url = "https://files.pythonhosted.org/packages/2e/08/3e56a18819462210432ae37d10f5c8eed3828be1d6c751b6e6a2e93c286a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d0744403adabef53c985a7f8a082b502a368510c40d184df349a0a8754533258", size = 44493116, upload-time = "2026-02-16T10:08:25.792Z" }, - { url = "https://files.pythonhosted.org/packages/f8/82/c40b68001dbec8a3faa4c08cd8c200798ac732d2854537c5449dc859f55a/pyarrow-23.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c33b5bf406284fd0bba436ed6f6c3ebe8e311722b441d89397c54f871c6863a2", size = 47564532, upload-time = "2026-02-16T10:08:34.27Z" }, - { url = "https://files.pythonhosted.org/packages/20/bc/73f611989116b6f53347581b02177f9f620efdf3cd3f405d0e83cdf53a83/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ddf743e82f69dcd6dbbcb63628895d7161e04e56794ef80550ac6f3315eeb1d5", size = 48183685, upload-time = "2026-02-16T10:08:42.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/cc/6c6b3ecdae2a8c3aced99956187e8302fc954cc2cca2a37cf2111dad16ce/pyarrow-23.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e052a211c5ac9848ae15d5ec875ed0943c0221e2fcfe69eee80b604b4e703222", size = 50605582, upload-time = "2026-02-16T10:08:51.641Z" }, - { url = "https://files.pythonhosted.org/packages/8d/94/d359e708672878d7638a04a0448edf7c707f9e5606cee11e15aaa5c7535a/pyarrow-23.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5abde149bb3ce524782d838eb67ac095cd3fd6090eba051130589793f1a7f76d", size = 27521148, upload-time = "2026-02-16T10:08:58.077Z" }, { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, { url = "https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, @@ -1524,6 +1958,118 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/f2/c0e76a0b451ffdf0cf788932e182758eb7558953f4f27f1aff8e2518b653/pyarrow-23.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:527e8d899f14bd15b740cd5a54ad56b7f98044955373a17179d5956ddb93d9ce", size = 28365807, upload-time = "2026-02-16T10:14:03.892Z" }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, 
upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { 
url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, 
upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + 
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = 
"2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = 
"sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -1555,12 +2101,12 @@ wheels = [ ] [[package]] -name = "pytz" -version = "2026.1.post1" +name = "python-dotenv" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] [[package]] @@ -1569,15 +2115,6 @@ version = "6.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, - { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, - { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, - { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, - { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, - { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, - { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, @@ -1627,29 +2164,59 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] +[[package]] +name = "ray" +version = "2.54.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "filelock" }, + { name = "jsonschema" }, + { name = "msgpack" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "pyyaml" }, + { name = "requests" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/58/6209b2231947f3c8df09ce1436f1c76c4a11fcafd57c8def852dcbb6d8ef/ray-2.54.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:8e39dd56b47a0a1820d5a5a54385bbe54d1d67e1093736d12d8ed4e99d0fa455", size = 70098998, upload-time = "2026-02-18T04:04:58.801Z" }, + { url = "https://files.pythonhosted.org/packages/ac/29/7871f4206e6b00a9bb784c16dad32ccd01e9df5a93545db92de220eb2871/ray-2.54.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:491ae56ab80d8822c4eaf4d5bb96dcf32a6231d8d7b76eb8034400eb9be1bb18", size = 72066630, upload-time = "2026-02-18T04:05:04.957Z" }, + { url = "https://files.pythonhosted.org/packages/1d/e8/d2c8ebd9cd945abc817b01ad02a29df78cdb86cd07d764587e16977389d0/ray-2.54.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:928bb09245a3c6f7c3c113ba8eafc69f948da9602d7f33e8251ecdf97c157615", size = 72895723, upload-time = "2026-02-18T04:05:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/7e/96/a5ea3a149a943475cda1d68fdcdb14c86251826c652c232ae853600ad7e7/ray-2.54.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e786330de55b3ba2228e36ec305381a9b86f0b01a8b6072c5811c3bc4dd9a3d", size = 27448371, upload-time = "2026-02-18T04:05:16.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/16/45eefb51eb1767342a6dbf41af0b432279e422e56160705fcd1098a7ec53/ray-2.54.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:cf5c33b4b13850ec24a5bd5f9d9e0a8161f8e586bfd297e52913d170dec447fe", size = 70084880, upload-time = "2026-02-18T04:05:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/60/ad/e07aca3637e9c3ec4857ec4366208099cf8488ece8061a9925ba29b66382/ray-2.54.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:795ae21d6b764245d3f521bc5833446d58569e7dfde9c5777417eb285d87450f", size = 72107346, upload-time = "2026-02-18T04:05:27.999Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b9/cc5ea8460c3dc602e6b7198277a7c59ba2b8929374ab22efa8df9f3deac8/ray-2.54.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:a972afd5aa3dda99d0b2f369b5f62e5dd95865ab7d37bf2e0a0e0d2cfbd9b325", size = 72967230, upload-time = "2026-02-18T04:05:33.771Z" }, + { url = "https://files.pythonhosted.org/packages/de/d7/744de3b1bb881701330ddcbb2f6efaccd65915d564ece899a3838f9fb105/ray-2.54.0-cp312-cp312-win_amd64.whl", hash = "sha256:2ee074ede491d0aacfa339c003f5d7a15826e1e2a72ce873234ccbc0446e19b3", size = 27427353, upload-time = "2026-02-18T04:05:38.853Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f2/5c0161d10445e703b7d01413ab54ec1cc5e27032555279d296df89b9c4ee/ray-2.54.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5ad77961fea16c697a0fb0e51216dd39c0bec28868cde54ac668edd58d12b8ae", size = 70030991, upload-time = "2026-02-18T04:05:43.966Z" }, + { url = "https://files.pythonhosted.org/packages/fd/8c/4a4a38eaec6e9614076a96967f58540f4f8d4aa0c793f43150c5df23cb9a/ray-2.54.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:8952c23a8aa94f10728c2d16e0dc3732d09aa0e6254801757ff494984a214f45", size = 72013826, upload-time = "2026-02-18T04:05:49.866Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/ac/e7ec2a406bd755f61c7090460fa5ab3f09b00c3c2d8db6d0b559f78a30eb/ray-2.54.0-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:ab89e6089abb6e46fb98fdd96d399b31a852d79127cd8ac00746c61d93defa2c", size = 72880209, upload-time = "2026-02-18T04:05:55.498Z" }, +] + +[package.optional-dependencies] +client = [ + { name = "grpcio" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + [[package]] name = "regex" version = "2026.2.19" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ff/c0/d8079d4f6342e4cec5c3e7d7415b5cd3e633d5f4124f7a4626908dbe84c7/regex-2026.2.19.tar.gz", hash = "sha256:6fb8cb09b10e38f3ae17cc6dc04a1df77762bd0351b6ba9041438e7cc85ec310", size = 414973, upload-time = "2026-02-19T19:03:47.899Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/de/f10b4506acfd684de4e42b0aa56ccea1a778a18864da8f6d319a40591062/regex-2026.2.19-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f5a37a17d110f9d5357a43aa7e3507cb077bf3143d1c549a45c4649e90e40a70", size = 488369, upload-time = "2026-02-19T18:59:45.01Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/2f/b4eaef1f0b4d0bf2a73eaf07c08f6c13422918a4180c9211ce0521746d0c/regex-2026.2.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:676c4e6847a83a1d5732b4ed553881ad36f0a8133627bb695a89ecf3571499d3", size = 290743, upload-time = "2026-02-19T18:59:48.527Z" }, - { url = "https://files.pythonhosted.org/packages/76/7c/805413bd0a88d04688c0725c222cfb811bd54a2f571004c24199a1ae55d6/regex-2026.2.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82336faeecac33297cd42857c3b36f12b91810e3fdd276befdd128f73a2b43fa", size = 288652, upload-time = "2026-02-19T18:59:50.2Z" }, - { url = "https://files.pythonhosted.org/packages/08/ff/2c4cd530a878b1975398e76faef4285f11e7c9ccf1aaedfd528bfcc1f580/regex-2026.2.19-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:52136f5b71f095cb74b736cc3a1b578030dada2e361ef2f07ca582240b703946", size = 781759, upload-time = "2026-02-19T18:59:51.836Z" }, - { url = "https://files.pythonhosted.org/packages/37/45/9608ab1b41f6740ff4076eabadde8e8b3f3400942b348ac41e8599ccc131/regex-2026.2.19-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4192464fe3e6cb0ef6751f7d3b16f886d8270d359ed1590dd555539d364f0ff7", size = 850947, upload-time = "2026-02-19T18:59:53.739Z" }, - { url = "https://files.pythonhosted.org/packages/90/3a/66471b6c4f7cac17e14bf5300e46661bba2b17ffb0871bd2759e837a6f82/regex-2026.2.19-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e561dd47a85d2660d3d3af4e6cb2da825cf20f121e577147963f875b83d32786", size = 898794, upload-time = "2026-02-19T18:59:55.993Z" }, - { url = "https://files.pythonhosted.org/packages/c2/d2/38c53929a5931f7398e5e49f5a5a3079cb2aba30119b4350608364cfad8c/regex-2026.2.19-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00ec994d7824bf01cd6c7d14c7a6a04d9aeaf7c42a2bc22d2359d715634d539b", size = 
791922, upload-time = "2026-02-19T18:59:58.216Z" }, - { url = "https://files.pythonhosted.org/packages/8b/bd/b046e065630fa25059d9c195b7b5308ea94da45eee65d40879772500f74c/regex-2026.2.19-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2cb00aabd96b345d56a8c2bc328c8d6c4d29935061e05078bf1f02302e12abf5", size = 783345, upload-time = "2026-02-19T18:59:59.948Z" }, - { url = "https://files.pythonhosted.org/packages/d4/8f/045c643d2fa255a985e8f87d848e4be230b711a8935e4bdc58e60b8f7b84/regex-2026.2.19-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f374366ed35673ea81b86a8859c457d4fae6ba092b71024857e9e237410c7404", size = 768055, upload-time = "2026-02-19T19:00:01.65Z" }, - { url = "https://files.pythonhosted.org/packages/72/9f/ab7ae9f5447559562f1a788bbc85c0e526528c5e6c20542d18e4afc86aad/regex-2026.2.19-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f9417fd853fcd00b7d55167e692966dd12d95ba1a88bf08a62002ccd85030790", size = 774955, upload-time = "2026-02-19T19:00:03.368Z" }, - { url = "https://files.pythonhosted.org/packages/37/5c/f16fc23c56f60b6f4ff194604a6e53bb8aec7b6e8e4a23a482dee8d77235/regex-2026.2.19-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:12e86a01594031abf892686fcb309b041bf3de3d13d99eb7e2b02a8f3c687df1", size = 846010, upload-time = "2026-02-19T19:00:05.079Z" }, - { url = "https://files.pythonhosted.org/packages/51/c8/6be4c854135d7c9f35d4deeafdaf124b039ecb4ffcaeb7ed0495ad2c97ca/regex-2026.2.19-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:79014115e6fdf18fd9b32e291d58181bf42d4298642beaa13fd73e69810e4cb6", size = 755938, upload-time = "2026-02-19T19:00:07.148Z" }, - { url = "https://files.pythonhosted.org/packages/d6/8d/f683d49b9663a5324b95a328e69d397f6dade7cb84154eec116bf79fe150/regex-2026.2.19-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31aefac2506967b7dd69af2c58eca3cc8b086d4110b66d6ac6e9026f0ee5b697", size = 835773, upload-time = "2026-02-19T19:00:08.939Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/cd/619224b90da09f167fe4497c350a0d0b30edc539ee9244bf93e604c073c3/regex-2026.2.19-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49cef7bb2a491f91a8869c7cdd90babf0a417047ab0bf923cd038ed2eab2ccb8", size = 780075, upload-time = "2026-02-19T19:00:10.838Z" }, - { url = "https://files.pythonhosted.org/packages/5b/88/19cfb0c262d6f9d722edef29157125418bf90eb3508186bf79335afeedae/regex-2026.2.19-cp310-cp310-win32.whl", hash = "sha256:3a039474986e7a314ace6efb9ce52f5da2bdb80ac4955358723d350ec85c32ad", size = 266004, upload-time = "2026-02-19T19:00:12.371Z" }, - { url = "https://files.pythonhosted.org/packages/82/af/5b487e0287ef72545d7ae92edecdacbe3d44e531cac24fda7de5598ba8dd/regex-2026.2.19-cp310-cp310-win_amd64.whl", hash = "sha256:5b81ff4f9cad99f90c807a00c5882fbcda86d8b3edd94e709fb531fc52cb3d25", size = 277895, upload-time = "2026-02-19T19:00:13.75Z" }, - { url = "https://files.pythonhosted.org/packages/4c/19/b6715a187ffca4d2979af92a46ce922445ba41f910bf187ccd666a2d52ef/regex-2026.2.19-cp310-cp310-win_arm64.whl", hash = "sha256:a032bc01a4bc73fc3cadba793fce28eb420da39338f47910c59ffcc11a5ba5ef", size = 270465, upload-time = "2026-02-19T19:00:15.127Z" }, { url = "https://files.pythonhosted.org/packages/6f/93/43f405a98f54cc59c786efb4fc0b644615ed2392fc89d57d30da11f35b5b/regex-2026.2.19-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:93b16a18cadb938f0f2306267161d57eb33081a861cee9ffcd71e60941eb5dfc", size = 488365, upload-time = "2026-02-19T19:00:17.857Z" }, { url = "https://files.pythonhosted.org/packages/66/46/da0efce22cd8f5ae28eeb25ac69703f49edcad3331ac22440776f4ea0867/regex-2026.2.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78af1e499cab704131f6f4e2f155b7f54ce396ca2acb6ef21a49507e4752e0be", size = 290737, upload-time = "2026-02-19T19:00:19.869Z" }, { url = "https://files.pythonhosted.org/packages/fb/19/f735078448132c1c974974d30d5306337bc297fe6b6f126164bff72c1019/regex-2026.2.19-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:eb20c11aa4c3793c9ad04c19a972078cdadb261b8429380364be28e867a843f2", size = 288654, upload-time = "2026-02-19T19:00:21.307Z" }, @@ -1776,17 +2343,120 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] +[[package]] +name = "rpds-py" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, + { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, + { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, + { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, + { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, + { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, + { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, + { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" }, + { url = "https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" }, + { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" }, + { url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", 
hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" }, + { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" }, + { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" }, + { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" }, + { url = "https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" }, + { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = 
"2025-11-30T20:23:24.449Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, + { url = "https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, + { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, + { url = "https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, + { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, + { url = "https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, + { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, + { url = "https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, + { url = "https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, + { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, + { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, + { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, + { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, + { 
url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, + { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, + { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, +] + [[package]] name = "rustbpe" version = 
"0.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/03/2e/f16e179ad1e185f0bb5a8fc2376fff05d1eeefcb6d8a77ee04306e8a42ae/rustbpe-0.1.0.tar.gz", hash = "sha256:18765f62ac579a9ff9e89c611f9c9b9e46bd1adde9be3f59c00b6eb4e1f28b3a", size = 29723, upload-time = "2026-01-03T22:24:11.872Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/d4/cdb13041ebfc5e10b98fa0de1d631bbce5476fe265c5e97516c344bbf44d/rustbpe-0.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:15f2e126a2b9fde264598f9317b1ec7f20ed3cf1e49cae081f7d6b7c1064655c", size = 1010629, upload-time = "2026-01-03T22:23:33.475Z" }, - { url = "https://files.pythonhosted.org/packages/3d/dd/401cb29be42b3efa80a8499ac41532bb1aa488c16cb1921dca880f0d75ce/rustbpe-0.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:34dc37f2771517813b70be47e26d5861ba69298f299dfd24409882e76bd1ccad", size = 951962, upload-time = "2026-01-03T22:23:35.025Z" }, - { url = "https://files.pythonhosted.org/packages/3b/4c/1f17ebab894dcb72e5b15389ad4b06b74637745163371339f32f23cdec76/rustbpe-0.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e7543962c932c3dcc29f5a02855ad613e9b05bc93c898fb99e4c7928598c7b", size = 1034660, upload-time = "2026-01-03T22:23:36.353Z" }, - { url = "https://files.pythonhosted.org/packages/ba/95/ab37d09e7b51b3ae49ef5af885c3d7c0244c72521f0740c6b55e1827a251/rustbpe-0.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7e90da31ce2708481c6b2169d9b8072917d7deefae4fb91cf25655dedac6afa", size = 1078818, upload-time = "2026-01-03T22:23:37.606Z" }, - { url = "https://files.pythonhosted.org/packages/94/65/79a4005477545c0661571e642d614177525e010ff9fc27cc10e579bde38d/rustbpe-0.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b92ff475c37a2b10391614c10ab2deb923bb029a8288e11e77618d5e5fcdc0e", size = 919699, upload-time = "2026-01-03T22:23:38.944Z" }, { url = 
"https://files.pythonhosted.org/packages/16/c1/d4fadf70d1cc0914c812a9c7c1e5cce0813440f7d16082fdb399ec33748d/rustbpe-0.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:400be6ede8875d5ac0e0ac91dfba1ec7ea7d359353b0465da633576cf01c7de7", size = 1008245, upload-time = "2026-01-03T22:23:40.245Z" }, { url = "https://files.pythonhosted.org/packages/8d/e1/ac7d4044dbee242bbcb7d9fc425f6ea8c52f984c7708cbb4cb9633976b96/rustbpe-0.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dff3ffb6f05576a27732d2013f044ec6f137bc7bce6773a5e134cfc0c24dcc82", size = 949344, upload-time = "2026-01-03T22:23:41.664Z" }, { url = "https://files.pythonhosted.org/packages/2a/7b/008e45858130eb803085d131a05e6e55c123a2b63b763ea08a45aa8b7673/rustbpe-0.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92a0186ed815ccec376cca23c4bc5f209f6c67efeb101c1c935345cd63cc9eea", size = 1031915, upload-time = "2026-01-03T22:23:42.93Z" }, @@ -1813,6 +2483,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/49/13/78d768a451dc9e634f933f2231b3fa9be524955ed84317b40e5528a2d906/rustbpe-0.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f419fd428e8ffd2430945a694cb5177706550ee5c9b16737ba860ecccd5acff", size = 1075802, upload-time = "2026-01-03T22:24:10.573Z" }, ] +[[package]] +name = "safetensors" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/9c/6e74567782559a63bd040a236edca26fd71bc7ba88de2ef35d75df3bca5e/safetensors-0.7.0.tar.gz", hash = "sha256:07663963b67e8bd9f0b8ad15bb9163606cd27cc5a1b96235a50d8369803b96b0", size = 200878, upload-time = "2025-11-19T15:18:43.199Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/47/aef6c06649039accf914afef490268e1067ed82be62bcfa5b7e886ad15e8/safetensors-0.7.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c82f4d474cf725255d9e6acf17252991c3c8aac038d6ef363a4bf8be2f6db517", size = 467781, 
upload-time = "2025-11-19T15:18:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/e8/00/374c0c068e30cd31f1e1b46b4b5738168ec79e7689ca82ee93ddfea05109/safetensors-0.7.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:94fd4858284736bb67a897a41608b5b0c2496c9bdb3bf2af1fa3409127f20d57", size = 447058, upload-time = "2025-11-19T15:18:34.416Z" }, + { url = "https://files.pythonhosted.org/packages/f1/06/578ffed52c2296f93d7fd2d844cabfa92be51a587c38c8afbb8ae449ca89/safetensors-0.7.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e07d91d0c92a31200f25351f4acb2bc6aff7f48094e13ebb1d0fb995b54b6542", size = 491748, upload-time = "2025-11-19T15:18:09.79Z" }, + { url = "https://files.pythonhosted.org/packages/ae/33/1debbbb70e4791dde185edb9413d1fe01619255abb64b300157d7f15dddd/safetensors-0.7.0-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8469155f4cb518bafb4acf4865e8bb9d6804110d2d9bdcaa78564b9fd841e104", size = 503881, upload-time = "2025-11-19T15:18:16.145Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1c/40c2ca924d60792c3be509833df711b553c60effbd91da6f5284a83f7122/safetensors-0.7.0-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bef08bf00a2bff599982f6b08e8770e09cc012d7bba00783fc7ea38f1fb37d", size = 623463, upload-time = "2025-11-19T15:18:21.11Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3a/13784a9364bd43b0d61eef4bea2845039bc2030458b16594a1bd787ae26e/safetensors-0.7.0-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:42cb091236206bb2016d245c377ed383aa7f78691748f3bb6ee1bfa51ae2ce6a", size = 532855, upload-time = "2025-11-19T15:18:25.719Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/429e9b1cb3fc651937727befe258ea24122d9663e4d5709a48c9cbfceecb/safetensors-0.7.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac7252938f0696ddea46f5e855dd3138444e82236e3be475f54929f0c510d48", size = 507152, 
upload-time = "2025-11-19T15:18:33.023Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a8/4b45e4e059270d17af60359713ffd83f97900d45a6afa73aaa0d737d48b6/safetensors-0.7.0-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1d060c70284127fa805085d8f10fbd0962792aed71879d00864acda69dbab981", size = 541856, upload-time = "2025-11-19T15:18:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/06/87/d26d8407c44175d8ae164a95b5a62707fcc445f3c0c56108e37d98070a3d/safetensors-0.7.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cdab83a366799fa730f90a4ebb563e494f28e9e92c4819e556152ad55e43591b", size = 674060, upload-time = "2025-11-19T15:18:37.211Z" }, + { url = "https://files.pythonhosted.org/packages/11/f5/57644a2ff08dc6325816ba7217e5095f17269dada2554b658442c66aed51/safetensors-0.7.0-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:672132907fcad9f2aedcb705b2d7b3b93354a2aec1b2f706c4db852abe338f85", size = 771715, upload-time = "2025-11-19T15:18:38.689Z" }, + { url = "https://files.pythonhosted.org/packages/86/31/17883e13a814bd278ae6e266b13282a01049b0c81341da7fd0e3e71a80a3/safetensors-0.7.0-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:5d72abdb8a4d56d4020713724ba81dac065fedb7f3667151c4a637f1d3fb26c0", size = 714377, upload-time = "2025-11-19T15:18:40.162Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d8/0c8a7dc9b41dcac53c4cbf9df2b9c83e0e0097203de8b37a712b345c0be5/safetensors-0.7.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0f6d66c1c538d5a94a73aa9ddca8ccc4227e6c9ff555322ea40bdd142391dd4", size = 677368, upload-time = "2025-11-19T15:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/05/e5/cb4b713c8a93469e3c5be7c3f8d77d307e65fe89673e731f5c2bfd0a9237/safetensors-0.7.0-cp38-abi3-win32.whl", hash = "sha256:c74af94bf3ac15ac4d0f2a7c7b4663a15f8c2ab15ed0fc7531ca61d0835eccba", size = 326423, upload-time = "2025-11-19T15:18:45.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/e6/ec8471c8072382cb91233ba7267fd931219753bb43814cbc71757bfd4dab/safetensors-0.7.0-cp38-abi3-win_amd64.whl", hash = "sha256:d1239932053f56f3456f32eb9625590cc7582e905021f94636202a864d470755", size = 341380, upload-time = "2025-11-19T15:18:44.427Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.54.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/e9/2e3a46c304e7fa21eaa70612f60354e32699c7102eb961f67448e222ad7c/sentry_sdk-2.54.0.tar.gz", hash = "sha256:2620c2575128d009b11b20f7feb81e4e4e8ae08ec1d36cbc845705060b45cc1b", size = 413813, upload-time = "2026-03-02T15:12:41.355Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/39/be412cc86bc6247b8f69e9383d7950711bd86f8d0a4a4b0fe8fad685bc21/sentry_sdk-2.54.0-py2.py3-none-any.whl", hash = "sha256:fd74e0e281dcda63afff095d23ebcd6e97006102cdc8e78a29f19ecdf796a0de", size = 439198, upload-time = "2026-03-02T15:12:39.546Z" }, +] + [[package]] name = "setuptools" version = "82.0.0" @@ -1840,6 +2545,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] +[[package]] +name = "smmap" +version = "5.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "submitit" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cloudpickle" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/47/86/497018fb3b74e71bef45df82762b176e6b3d159f29941c20d2f141ec4096/submitit-1.5.4.tar.gz", hash = "sha256:7100848bd1cdda79c7196e54ee830793ae75fd7adde0c5bef738d72360a07508", size = 81538, upload-time = "2025-12-17T19:20:03.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/bb/711e1c2ebd18a21202c972dd5d5c8e09a921f2d3560e3a53d6350c808ab7/submitit-1.5.4-py3-none-any.whl", hash = "sha256:c26f3a7c8d4150eaf70b1da71e2023e9e9936c93e8342ed7db910f29158561c5", size = 76043, upload-time = "2025-12-17T19:20:01.941Z" }, +] + [[package]] name = "sympy" version = "1.14.0" @@ -1852,6 +2588,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = 
"sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] +[[package]] +name = "termcolor" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/79/cf31d7a93a8fdc6aa0fbb665be84426a8c5a557d9240b6239e9e11e35fc5/termcolor-3.3.0.tar.gz", hash = "sha256:348871ca648ec6a9a983a13ab626c0acce02f515b9e1983332b17af7979521c5", size = 14434, upload-time = "2025-12-29T12:55:21.882Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/d1/8bb87d21e9aeb323cc03034f5eaf2c8f69841e40e4853c2627edf8111ed3/termcolor-3.3.0-py3-none-any.whl", hash = "sha256:cf642efadaf0a8ebbbf4bc7a31cec2f9b5f21a9f726f4ccbb08192c9c26f43a5", size = 7734, upload-time = "2025-12-29T12:55:20.718Z" }, +] + [[package]] name = "tiktoken" version = "0.12.0" @@ -1862,13 +2607,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/b3/2cb7c17b6c4cf8ca983204255d3f1d95eda7213e247e6947a0ee2c747a2c/tiktoken-0.12.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3de02f5a491cfd179aec916eddb70331814bd6bf764075d39e21d5862e533970", size = 1051991, upload-time = "2025-10-06T20:21:34.098Z" }, - { url = "https://files.pythonhosted.org/packages/27/0f/df139f1df5f6167194ee5ab24634582ba9a1b62c6b996472b0277ec80f66/tiktoken-0.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6cfb6d9b7b54d20af21a912bfe63a2727d9cfa8fbda642fd8322c70340aad16", size = 995798, upload-time = "2025-10-06T20:21:35.579Z" }, - { url = 
"https://files.pythonhosted.org/packages/ef/5d/26a691f28ab220d5edc09b9b787399b130f24327ef824de15e5d85ef21aa/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:cde24cdb1b8a08368f709124f15b36ab5524aac5fa830cc3fdce9c03d4fb8030", size = 1129865, upload-time = "2025-10-06T20:21:36.675Z" }, - { url = "https://files.pythonhosted.org/packages/b2/94/443fab3d4e5ebecac895712abd3849b8da93b7b7dec61c7db5c9c7ebe40c/tiktoken-0.12.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6de0da39f605992649b9cfa6f84071e3f9ef2cec458d08c5feb1b6f0ff62e134", size = 1152856, upload-time = "2025-10-06T20:21:37.873Z" }, - { url = "https://files.pythonhosted.org/packages/54/35/388f941251b2521c70dd4c5958e598ea6d2c88e28445d2fb8189eecc1dfc/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6faa0534e0eefbcafaccb75927a4a380463a2eaa7e26000f0173b920e98b720a", size = 1195308, upload-time = "2025-10-06T20:21:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/f8/00/c6681c7f833dd410576183715a530437a9873fa910265817081f65f9105f/tiktoken-0.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:82991e04fc860afb933efb63957affc7ad54f83e2216fe7d319007dab1ba5892", size = 1255697, upload-time = "2025-10-06T20:21:41.154Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d2/82e795a6a9bafa034bf26a58e68fe9a89eeaaa610d51dbeb22106ba04f0a/tiktoken-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:6fb2995b487c2e31acf0a9e17647e3b242235a20832642bb7a9d1a181c0c1bb1", size = 879375, upload-time = "2025-10-06T20:21:43.201Z" }, { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, { url = 
"https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, @@ -1914,57 +2652,50 @@ wheels = [ ] [[package]] -name = "tomli" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, - { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, - { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = 
"2026-01-11T11:21:48.503Z" }, - { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, - { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, - { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, - { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, - { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, - { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, - { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, - { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, - { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, - { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, - { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, - { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, - { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, - { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, - { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, - { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, - { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, - { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, - { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, - { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, - { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, - { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, - { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, - { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, - { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, - { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, - { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, - { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, - { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, - { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, - { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +name = "tinker" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "click" }, + { name = "distro" }, + { name = "httpx", extra = ["http2"] }, + { name = "numpy" }, + { name = "pydantic" }, + { name = "rich" }, + { name = "sniffio" }, + { name = "transformers" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/95/f354e47dd2f2d675b0978ae5d80c9c5f1230e5363f09a001f2d85c9e00c8/tinker-0.14.0.tar.gz", hash = "sha256:554b30c43d9cf83b41eb07775a2a9f65eb2e77bed2cd79792bd8cd0d7693e717", size = 179845, upload-time = "2026-02-27T05:50:56.415Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/76/7a7a02736f2aa667bb59bf288a0c11253a13ef8119b43c7c012f5dba1fc0/tinker-0.14.0-py3-none-any.whl", hash = "sha256:b8e4dd2c77858e80177cb0b970c5d64a7dae085b67a711cc7e1d95352534c563", size = 174812, upload-time = "2026-02-27T05:50:57.762Z" }, +] + +[[package]] +name = "tokenizers" +version = "0.22.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = 
"2026-01-05T10:41:02.158Z" }, + { url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" }, + { url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" }, + { url = "https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = 
"2026-01-05T10:40:51.139Z" }, + { url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" }, + { url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" }, + { url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" }, + { url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" }, + { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" }, ] [[package]] @@ -1975,8 +2706,7 @@ dependencies = [ { name = "filelock" }, { name = "fsspec" }, { name = "jinja2" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, - { name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "networkx" }, { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, { name = "nvidia-cuda-cupti-cu12", marker = "sys_platform == 'linux'" }, { name = "nvidia-cuda-nvrtc-cu12", marker = "sys_platform == 'linux'" }, @@ -1998,9 +2728,6 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ - { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:72f0f096475e8095a6bea3fba75bd3b46cf42c761b29588f7599314e67a32661" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c8d670aa0be6fbecd2b0e7b7d514a104dbdefcc3786ca446cf0c3415043ea40a" }, - { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp310-cp310-win_amd64.whl", hash = "sha256:64399adaa8ea0896d02cf844cba3c5dd77e769520a1af73572599e0eaa2cf551" }, { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:cf4ad82430824a80a9f398e29369524ed26c152cf00c2c12002e5400b35e260d" }, { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2a1da940f0757621d098c9755f7504d791a72a40920ec85a4fd98b20253fca4e" }, { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp311-cp311-win_amd64.whl", hash = "sha256:633005a3700e81b5be0df2a7d3c1d48aced23ed927653797a3bd2b144a3aeeb6" }, @@ -2033,13 +2760,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, ] +[[package]] +name = "transformers" +version = "5.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "safetensors" }, + { name = "tokenizers" }, + { name = "tqdm" }, + { name = "typer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/1a/70e830d53ecc96ce69cfa8de38f163712d2b43ac52fbd743f39f56025c31/transformers-5.3.0.tar.gz", hash = "sha256:009555b364029da9e2946d41f1c5de9f15e6b1df46b189b7293f33a161b9c557", size = 8830831, upload-time = "2026-03-04T17:41:46.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/88/ae8320064e32679a5429a2c9ebbc05c2bf32cefb6e076f9b07f6d685a9b4/transformers-5.3.0-py3-none-any.whl", hash = "sha256:50ac8c89c3c7033444fb3f9f53138096b997ebb70d4b5e50a2e810bf12d3d29a", size = 10661827, upload-time = "2026-03-04T17:41:42.722Z" }, +] + [[package]] name = "triton" version = "3.5.1" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d9/2e/f95e673222afa2c7f0c687d8913e98fcf2589ef0b1405de76894e37fe18f/triton-3.5.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f63e34dcb32d7bd3a1d0195f60f30d2aee8b08a69a0424189b71017e23dfc3d2", size = 159821655, upload-time = "2025-11-11T17:51:44.09Z" }, - { url = "https://files.pythonhosted.org/packages/fd/6e/676ab5019b4dde8b9b7bab71245102fc02778ef3df48218b298686b9ffd6/triton-3.5.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fc53d849f879911ea13f4a877243afc513187bc7ee92d1f2c0f1ba3169e3c94", size = 170320692, upload-time = "2025-11-11T17:40:46.074Z" }, { url = "https://files.pythonhosted.org/packages/dc/dc/6ce44d055f2fc2403c4ec6b3cfd3a9b25f57b7d95efadccdea91497f8e81/triton-3.5.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da47169e30a779bade679ce78df4810fca6d78a955843d2ddb11f226adc517dc", size = 159928005, upload-time = "2025-11-11T17:51:50.008Z" }, { url = "https://files.pythonhosted.org/packages/b0/72/ec90c3519eaf168f22cb1757ad412f3a2add4782ad3a92861c9ad135d886/triton-3.5.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61413522a48add32302353fdbaaf92daaaab06f6b5e3229940d21b5207f47579", size = 170425802, upload-time = "2025-11-11T17:40:53.209Z" }, { url = "https://files.pythonhosted.org/packages/db/53/2bcc46879910991f09c063eea07627baef2bc62fe725302ba8f46a2c1ae5/triton-3.5.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:275a045b6ed670dd1bd005c3e6c2d61846c74c66f4512d6f33cc027b11de8fd4", size = 159940689, upload-time = "2025-11-11T17:51:55.938Z" }, @@ -2054,6 +2799,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/16/b5/b0d3d8b901b6a04ca38df5e24c27e53afb15b93624d7fd7d658c7cd9352a/triton-3.5.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bac7f7d959ad0f48c0e97d6643a1cc0fd5786fe61cb1f83b537c6b2d54776478", size = 170582192, 
upload-time = "2025-11-11T17:41:23.963Z" }, ] +[[package]] +name = "ttt-discover" +version = "0.1.0" +source = { git = "https://github.com/test-time-training/discover?rev=5df1a0ee9b04272ca33de0101ae64dd499e63f29#5df1a0ee9b04272ca33de0101ae64dd499e63f29" } +dependencies = [ + { name = "chz" }, + { name = "cloudpickle" }, + { name = "datasets" }, + { name = "fsspec" }, + { name = "huggingface-hub" }, + { name = "hydra-core" }, + { name = "numpy" }, + { name = "python-dotenv" }, + { name = "ray", extra = ["client"] }, + { name = "safetensors" }, + { name = "submitit" }, + { name = "termcolor" }, + { name = "tiktoken" }, + { name = "tinker" }, + { name = "torch" }, + { name = "tqdm" }, + { name = "transformers" }, + { name = "wandb" }, +] + [[package]] name = "typer" version = "0.24.1" @@ -2078,6 +2848,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + [[package]] name = "tzdata" version = "2025.3" @@ -2095,3 +2877,257 @@ sdist = { url = 
"https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6 wheels = [ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] + +[[package]] +name = "wandb" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "gitpython" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sentry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/60/d94952549920469524b689479c864c692ca47eca4b8c2fe3389b64a58778/wandb-0.25.0.tar.gz", hash = "sha256:45840495a288e34245d69d07b5a0b449220fbc5b032e6b51c4f92ec9026d2ad1", size = 43951335, upload-time = "2026-02-13T00:17:45.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/7d/0c131db3ec9deaabbd32263d90863cbfbe07659527e11c35a5c738cecdc5/wandb-0.25.0-py3-none-macosx_12_0_arm64.whl", hash = "sha256:5eecb3c7b5e60d1acfa4b056bfbaa0b79a482566a9db58c9f99724b3862bc8e5", size = 23287536, upload-time = "2026-02-13T00:17:20.265Z" }, + { url = "https://files.pythonhosted.org/packages/c3/95/31bb7f76a966ec87495e5a72ac7570685be162494c41757ac871768dbc4f/wandb-0.25.0-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:daeedaadb183dc466e634fba90ab2bab1d4e93000912be0dee95065a0624a3fd", size = 25196062, upload-time = "2026-02-13T00:17:23.356Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a1/258cdedbf30cebc692198a774cf0ef945b7ed98ee64bdaf62621281c95d8/wandb-0.25.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:5e0127dbcef13eea48f4b84268da7004d34d3120ebc7b2fa9cefb72b49dbb825", size = 22799744, upload-time = "2026-02-13T00:17:26.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/91/ec9465d014cfd199c5b2083d271d31b3c2aedeae66f3d8a0712f7f54bdf3/wandb-0.25.0-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:6c4c38077836f9b7569a35b0e1dcf1f0c43616fcd936d182f475edbfea063665", size = 25262839, upload-time = "2026-02-13T00:17:28.8Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/cb2d1c7143f534544147fb53fe87944508b8cb9a058bc5b6f8a94adbee15/wandb-0.25.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6edd8948d305cb73745bf564b807bd73da2ccbd47c548196b8a362f7df40aed8", size = 22853714, upload-time = "2026-02-13T00:17:31.68Z" }, + { url = "https://files.pythonhosted.org/packages/d7/94/68163f70c1669edcf130822aaaea782d8198b5df74443eca0085ec596774/wandb-0.25.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ada6f08629bb014ad6e0a19d5dec478cdaa116431baa3f0a4bf4ab8d9893611f", size = 25358037, upload-time = "2026-02-13T00:17:34.676Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fb/9578eed2c01b2fc6c8b693da110aa9c73a33d7bb556480f5cfc42e48c94e/wandb-0.25.0-py3-none-win32.whl", hash = "sha256:020b42ca4d76e347709d65f59b30d4623a115edc28f462af1c92681cb17eae7c", size = 24604118, upload-time = "2026-02-13T00:17:37.641Z" }, + { url = "https://files.pythonhosted.org/packages/25/97/460f6cb738aaa39b4eb2e6b4c630b2ae4321cdd70a79d5955ea75a878981/wandb-0.25.0-py3-none-win_amd64.whl", hash = "sha256:78307ac0b328f2dc334c8607bec772851215584b62c439eb320c4af4fb077a00", size = 24604122, upload-time = "2026-02-13T00:17:39.991Z" }, + { url = "https://files.pythonhosted.org/packages/27/6c/5847b4dda1dfd52630dac08711d4348c69ed657f0698fc2d949c7f7a6622/wandb-0.25.0-py3-none-win_arm64.whl", hash = "sha256:c6174401fd6fb726295e98d57b4231c100eca96bd17de51bfc64038a57230aaf", size = 21785298, upload-time = "2026-02-13T00:17:42.475Z" }, +] + +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +] + 
+[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" }, + { url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" }, + { url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" }, + { url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" }, + { url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" }, + { url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" }, + { url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" }, + { url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" }, + { url = "https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" }, + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", 
hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = 
"sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", 
hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +] From cf98e8790f9a0b83b2fec9edc7085c9901e479c8 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Mon, 9 Mar 2026 16:48:32 +1100 Subject: [PATCH 02/17] Credit TTT-Discover paper and repo in README --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index f4720dc6..e1911756 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,8 @@ The `program.md` file is essentially a super lightweight "skill". This repo also includes a thin adapter that uses [TTT-Discover](https://github.com/test-time-training/discover) as the outer RL engine for `autoresearch`. +This integration is inspired by and credits the TTT-Discover paper, [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175), and the upstream [test-time-training/discover](https://github.com/test-time-training/discover) repository. + Because upstream `ttt-discover` depends on Python 3.11+, the integrated repo now targets Python 3.11+ for both the original and TTT workflows. - The outer model proposes full replacements for `train.py`. 
From 290be241a15532397d904aa9e3f3a9bac3d07c7e Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Tue, 10 Mar 2026 13:31:46 +1100 Subject: [PATCH 03/17] Increase default rollout group size to 8 --- configs/ttt_discover_autoresearch.yaml | 2 +- ttt_autoresearch/config.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index fcd1f377..2d355be3 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -2,7 +2,7 @@ model_name: Qwen/Qwen3.5-35B-A3B provider: null api_base: null max_steps: 8 -samples_per_step: 4 +samples_per_step: 8 temperature: 1.0 timeout_sec: 2700 run_dir: null diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index a550a325..1bc6295b 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -18,7 +18,7 @@ class TTTAutoResearchConfig: provider: str | None = None api_base: str | None = None max_steps: int = 8 - samples_per_step: int = 4 + samples_per_step: int = 8 temperature: float = 1.0 timeout_sec: int = 2700 run_dir: str | None = None From 78ec9c84b3e192545bc2d68742e70d4bf120c943 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Tue, 10 Mar 2026 13:43:18 +1100 Subject: [PATCH 04/17] Harden TTT adapter for single-GPU rollout execution --- README.md | 5 +- configs/ttt_discover_autoresearch.yaml | 1 + tests/test_reward.py | 75 +++++++++++++++++++++++++- tests/test_runner.py | 12 +++++ ttt_autoresearch/config.py | 33 ++++++++++-- ttt_autoresearch/reward.py | 29 +++++++--- 6 files changed, 144 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index e1911756..d5be6e3f 100644 --- a/README.md +++ b/README.md @@ -95,9 +95,12 @@ Outputs are written under `runs//`: ### Config notes -- `model_name` is fully configurable and passed through to the installed `discover` backend. 
+- `model_name` is configurable, but the prompt/response format still needs a compatible `renderer_name`. +- Known-good renderer names are `qwen3`, `qwen3_instruct`, `gpt_oss_no_sysprompt`, `gpt_oss_low_reasoning`, `gpt_oss_medium_reasoning`, and `gpt_oss_high_reasoning`. +- For unknown model families, set both `model_name` and `renderer_name` explicitly or startup will fail fast. - `provider` and `api_base` can be set in the YAML or overridden on the CLI. - `baseline_command_override` and `candidate_command_override` let you swap the execution command without changing code. +- `max_concurrent_evaluations` defaults to `1` so grouped rollouts do not launch multiple full `train.py` jobs onto the same GPU at once. - `run_ttt_discover.py` uses the upstream `discover` trainer stack directly, but bypasses the public `discover()` model-name guard so non-GPT-OSS models such as Qwen can be used without changing the RL optimization recipe. ## Project structure diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index 2d355be3..87f6f1dc 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -21,3 +21,4 @@ num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 +max_concurrent_evaluations: 1 diff --git a/tests/test_reward.py b/tests/test_reward.py index 496360d5..4291a561 100644 --- a/tests/test_reward.py +++ b/tests/test_reward.py @@ -2,10 +2,12 @@ from pathlib import Path import tempfile +import threading +import time import unittest import sys -from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig from ttt_autoresearch.env import AutoResearchState from ttt_autoresearch.reward import AutoResearchRewardEvaluator, reward_for_result from ttt_autoresearch.runner import AutoResearchRunner, RunResult @@ -76,6 +78,77 @@ def test_evaluator_uses_inner_metric_as_reward(self) -> None: 
self.assertGreater(result["reward"], 0.0) self.assertEqual(result["correctness"], 1.0) + def test_concurrent_reward_calls_serialize_inner_evaluations(self) -> None: + class FakeRunner: + def __init__(self) -> None: + self.lock = threading.Lock() + self.active = 0 + self.max_seen = 0 + + def run_candidate(self, **_: object) -> RunResult: + with self.lock: + self.active += 1 + self.max_seen = max(self.max_seen, self.active) + try: + time.sleep(0.1) + finally: + with self.lock: + self.active -= 1 + return RunResult( + status="success", + val_bpb=0.9, + stdout_path=Path("stdout.log"), + stderr_path=Path("stderr.log"), + elapsed_sec=0.1, + workspace_path=Path("."), + metrics_path=None, + command=["python", "train.py"], + returncode=0, + ) + + def update_best(self, **_: object) -> bool: + return False + + def append_history(self, _: dict[str, object]) -> None: + return None + + def read_text(self, _: Path, max_chars: int = 4000) -> str: + return "" + + bootstrap = BootstrapContext( + repo_root=Path("."), + run_dir=Path("."), + config=TTTAutoResearchConfig(max_concurrent_evaluations=1).normalized(Path(".")), + program_text="program", + baseline_train_py="train", + baseline_val_bpb=1.0, + ) + runner = FakeRunner() + AutoResearchRewardEvaluator.configure(bootstrap, runner) # type: ignore[arg-type] + evaluator = AutoResearchRewardEvaluator(problem_type="autoresearch", log_dir=".") + payload = '{"summary":"improve","rationale":"lower loss","train_py":"print(1)\\n"}' + + def make_state() -> AutoResearchState: + return AutoResearchState( + timestep=-1, + construction=[], + code="print(0)\n", + value=-1.0, + baseline_val_bpb=1.0, + current_best_val_bpb=1.0, + ) + + threads = [ + threading.Thread(target=evaluator.get_reward, args=(payload, make_state())) + for _ in range(2) + ] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + self.assertEqual(runner.max_seen, 1) + if __name__ == "__main__": unittest.main() diff --git a/tests/test_runner.py 
b/tests/test_runner.py index b9e974fd..65b69885 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -80,6 +80,18 @@ def test_config_normalizes_relative_paths_and_overrides_env(self) -> None: else: os.environ["OPENAI_BASE_URL"] = old_base + def test_unknown_model_requires_explicit_renderer(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + with self.assertRaises(ValueError): + TTTAutoResearchConfig(model_name="meta-llama/Meta-Llama-3-70B").normalized(root) + + config = TTTAutoResearchConfig( + model_name="meta-llama/Meta-Llama-3-70B", + renderer_name="gpt_oss_high_reasoning", + ).normalized(root) + self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") + if __name__ == "__main__": unittest.main() diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 1bc6295b..28920155 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -10,6 +10,14 @@ DISCOVER_GIT_REV = "5df1a0ee9b04272ca33de0101ae64dd499e63f29" +SUPPORTED_RENDERERS = ( + "qwen3", + "qwen3_instruct", + "gpt_oss_no_sysprompt", + "gpt_oss_low_reasoning", + "gpt_oss_medium_reasoning", + "gpt_oss_high_reasoning", +) @dataclass(slots=True) @@ -37,6 +45,7 @@ class TTTAutoResearchConfig: eval_timeout: int | None = None local_model_path: str | None = None keep_history: int = 6 + max_concurrent_evaluations: int = 1 def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": run_dir = _resolve_path(self.run_dir, repo_root) if self.run_dir else repo_root / "runs" / datetime.now().strftime("%Y%m%d_%H%M%S") @@ -54,7 +63,7 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": baseline_command_override=_normalize_command(self.baseline_command_override), candidate_command_override=_normalize_command(self.candidate_command_override), experiment_name=experiment_name, - renderer_name=self.renderer_name or infer_renderer_name(self.model_name), + renderer_name=resolve_renderer_name(self.model_name, 
self.renderer_name), learning_rate=self.learning_rate, lora_rank=self.lora_rank, kl_penalty_coef=self.kl_penalty_coef, @@ -65,6 +74,7 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": eval_timeout=self.eval_timeout or self.timeout_sec, local_model_path=_resolve_optional_path_str(self.local_model_path, repo_root), keep_history=self.keep_history, + max_concurrent_evaluations=max(1, int(self.max_concurrent_evaluations)), ) def to_dict(self) -> dict[str, Any]: @@ -109,7 +119,7 @@ def subprocess_env(self) -> dict[str, str]: return env -def infer_renderer_name(model_name: str) -> str: +def infer_renderer_name(model_name: str) -> str | None: lowered = model_name.lower() if "qwen" in lowered: if "instruct" in lowered: @@ -117,7 +127,24 @@ def infer_renderer_name(model_name: str) -> str: return "qwen3" if "gpt-oss" in lowered: return "gpt_oss_high_reasoning" - return "qwen3" + return None + + +def resolve_renderer_name(model_name: str, renderer_name: str | None) -> str: + if renderer_name is not None: + if renderer_name not in SUPPORTED_RENDERERS: + supported = ", ".join(SUPPORTED_RENDERERS) + raise ValueError(f"Unsupported renderer_name={renderer_name!r}. Supported values: {supported}.") + return renderer_name + + inferred = infer_renderer_name(model_name) + if inferred is None: + supported = ", ".join(SUPPORTED_RENDERERS) + raise ValueError( + f"Could not infer a renderer for model_name={model_name!r}. " + f"Set renderer_name explicitly to one of: {supported}." 
+ ) + return inferred def load_config(path: str | os.PathLike[str], repo_root: str | os.PathLike[str] | None = None) -> TTTAutoResearchConfig: diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py index a0ce0416..e80c8507 100644 --- a/ttt_autoresearch/reward.py +++ b/ttt_autoresearch/reward.py @@ -10,6 +10,7 @@ _ARTIFACT_LOCK = threading.Lock() +_EVALUATION_SLOTS: threading.BoundedSemaphore | None = None def reward_for_result(current_best_val_bpb: float, result: RunResult) -> tuple[float, float]: @@ -30,8 +31,10 @@ class AutoResearchRewardEvaluator(BaseRewardEvaluator): @classmethod def configure(cls, bootstrap: BootstrapContext, runner: AutoResearchRunner) -> None: + global _EVALUATION_SLOTS cls.bootstrap = bootstrap cls.runner = runner + _EVALUATION_SLOTS = threading.BoundedSemaphore(bootstrap.config.max_concurrent_evaluations) def __init__(self, *args: Any, **kwargs: Any) -> None: self.problem_type = kwargs.get("problem_type", "autoresearch") @@ -53,12 +56,7 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: status="invalid_candidate", ) - result = self.runner.run_candidate( - bootstrap=self.bootstrap, - candidate=candidate, - step=getattr(state, "timestep", -1) + 1, - state_id=getattr(state, "id", "unknown"), - ) + result = self._run_candidate(candidate, state) current_best = self._current_best_from_state(state) reward, correctness = reward_for_result(current_best, result) improved_global_best = False @@ -110,6 +108,25 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: }, } + def _run_candidate(self, candidate: PatchCandidate, state: Any) -> RunResult: + if self.bootstrap is None or self.runner is None: + raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + if _EVALUATION_SLOTS is None: + raise RuntimeError("AutoResearchRewardEvaluator evaluation slots are not configured.") + + # Grouped rollouts stay enabled for the upstream entropic advantage recipe, + # but inner autoresearch training runs must 
be serialized on a single GPU. + _EVALUATION_SLOTS.acquire() + try: + return self.runner.run_candidate( + bootstrap=self.bootstrap, + candidate=candidate, + step=getattr(state, "timestep", -1) + 1, + state_id=getattr(state, "id", "unknown"), + ) + finally: + _EVALUATION_SLOTS.release() + @staticmethod def _build_message(candidate: PatchCandidate, result: RunResult, current_best: float, reward: float) -> str: val_bpb = "n/a" if result.val_bpb is None else f"{result.val_bpb:.6f}" From 741b4da33e84a9d787394940182a915b959e50a4 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Tue, 10 Mar 2026 13:52:17 +1100 Subject: [PATCH 05/17] Rewrite README for TTT-Discover autoresearch fork --- README.md | 239 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 153 insertions(+), 86 deletions(-) diff --git a/README.md b/README.md index d5be6e3f..1c23170c 100644 --- a/README.md +++ b/README.md @@ -1,146 +1,213 @@ -# autoresearch +# Test Time RL Discover + Auto Research ![teaser](progress.png) -*One day, frontier AI research used to be done by meat computers in between eating, sleeping, having other fun, and synchronizing once in a while using sound wave interconnect in the ritual of "group meeting". That era is long gone. Research is now entirely the domain of autonomous swarms of AI agents running across compute cluster megastructures in the skies. The agents claim that we are now in the 10,205th generation of the code base, in any case no one could tell if that's right or wrong as the "code" is now a self-modifying binary that has grown beyond human comprehension. This repo is the story of how it all began. -@karpathy, March 2026*. +This repo is a focused fork of [karpathy/autoresearch](https://github.com/karpathy/autoresearch) that replaces the ad hoc outer experimentation loop with [TTT-Discover](https://github.com/test-time-training/discover). -The idea: give an AI agent a small but real LLM training setup and let it experiment autonomously overnight. 
It modifies the code, trains for 5 minutes, checks if the result improved, keeps or discards, and repeats. You wake up in the morning to a log of experiments and (hopefully) a better model. The training code here is a simplified single-GPU implementation of [nanochat](https://github.com/karpathy/nanochat). The core idea is that you're not touching any of the Python files like you normally would as a researcher. Instead, you are programming the `program.md` Markdown files that provide context to the AI agents and set up your autonomous research org. The default `program.md` in this repo is intentionally kept as a bare bones baseline, though it's obvious how one would iterate on it over time to find the "research org code" that achieves the fastest research progress, how you'd add more agents to the mix, etc. A bit more context on this project is here in this [tweet](https://x.com/karpathy/status/2029701092347630069). +The core idea is: -## How it works +- The **inner loop** is still `autoresearch`: edit `train.py`, run a fixed-budget training job, measure `val_bpb`. +- The **outer loop** is now **test-time RL** from TTT-Discover. +- The outer model proposes full replacements for `train.py`. +- The resulting inner-loop metric improvement becomes the reward used to update the outer model online. -The repo is deliberately kept small and only really has a three files that matter: +This keeps the original spirit of autoresearch, but makes the search policy itself train during the run. -- **`prepare.py`** — fixed constants, one-time data prep (downloads training data, trains a BPE tokenizer), and runtime utilities (dataloader, evaluation). Not modified. -- **`train.py`** — the single file the agent edits. Contains the full GPT model, optimizer (Muon + AdamW), and training loop. Everything is fair game: architecture, hyperparameters, optimizer, batch size, etc. **This file is edited and iterated on by the agent**. -- **`program.md`** — baseline instructions for one agent. 
Point your agent here and let it go. **This file is edited and iterated on by the human**. +## Credits -By design, training runs for a **fixed 5-minute time budget** (wall clock, excluding startup/compilation), regardless of the details of your compute. The metric is **val_bpb** (validation bits per byte) — lower is better, and vocab-size-independent so architectural changes are fairly compared. +This project is derived from: -## Quick start +- [karpathy/autoresearch](https://github.com/karpathy/autoresearch) +- [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175) +- [test-time-training/discover](https://github.com/test-time-training/discover) -**Requirements:** A single NVIDIA GPU (tested on H100), Python 3.11+, [uv](https://docs.astral.sh/uv/). +The RL optimization recipe is intended to stay with upstream `discover`; this repo mainly provides the autoresearch-specific environment, reward, runner, and usage wrapper. -```bash +## What This Repo Does -# 1. Install uv project manager (if you don't already have it) -curl -LsSf https://astral.sh/uv/install.sh | sh +The repo has two layers: -# 2. Install dependencies -uv sync +1. **Inner optimization target** + - `prepare.py` downloads data and trains the tokenizer. + - `train.py` is the only file the outer model edits. + - `val_bpb` is the optimization metric. Lower is better. -# 3. Download data and train tokenizer (one-time, ~2 min) -uv run prepare.py +2. **Outer TTT-Discover loop** + - `run_ttt_discover.py` launches the test-time RL run. + - `ttt_autoresearch/` adapts autoresearch to the `discover` environment interface. + - Each candidate `train.py` is executed in an isolated workspace. + - Reward is computed from `current_best_val_bpb - candidate_val_bpb`. -# 4. Manually run a single training experiment (~5 min) -uv run train.py -``` +## Repository Layout -If the above commands all work ok, your setup is working and you can go into autonomous research mode. 
+```text +prepare.py Fixed data prep and runtime utilities +train.py Inner training program edited by the outer model +program.md Human-authored research instructions/context +run_ttt_discover.py Main TTT-Discover entrypoint +ttt_autoresearch/ Adapter layer for environment, reward, runner, config +configs/ Ready-to-run YAML config +tests/ Smoke and unit coverage for the adapter +``` -## Running the agent +## How The RL Loop Works -Simply spin up your Claude/Codex or whatever you want in this repo (and disable all permissions), then you can prompt something like: +At each outer-loop step: -``` -Hi have a look at program.md and let's kick off a new experiment! let's do the setup first. -``` +1. TTT-Discover samples a group of candidate `train.py` replacements. +2. Each candidate is evaluated by running a real autoresearch training job. +3. The resulting `val_bpb` is parsed from the run logs. +4. Reward is computed from improvement over the current best state. +5. Upstream `discover` performs the online RL update. +6. If a candidate improves `val_bpb`, it becomes the new best `train.py`. -The `program.md` file is essentially a super lightweight "skill". +Important details: -## TTT-Discover mode +- The **action** is the full replacement contents of `train.py`. +- The **reward** is the inner-loop metric outcome, not the patch text. +- The implementation keeps grouped rollouts for the upstream entropic advantage recipe. +- Inner evaluations are serialized by default with `max_concurrent_evaluations: 1` so multiple full training jobs do not fight over the same GPU. -This repo also includes a thin adapter that uses [TTT-Discover](https://github.com/test-time-training/discover) as the outer RL engine for `autoresearch`. 
+## Quick Start -This integration is inspired by and credits the TTT-Discover paper, [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175), and the upstream [test-time-training/discover](https://github.com/test-time-training/discover) repository. +**Requirements** -Because upstream `ttt-discover` depends on Python 3.11+, the integrated repo now targets Python 3.11+ for both the original and TTT workflows. - -- The outer model proposes full replacements for `train.py`. -- Each candidate `train.py` is executed in an isolated workspace. -- The inner run's `val_bpb` becomes the reward signal for the outer model. -- The implementation keeps the `discover` RL recipe intact: online LoRA updates, grouped rollouts, KL control, and state reuse through the upstream sampler. +- Linux +- A single NVIDIA GPU +- Python 3.11+ +- [uv](https://docs.astral.sh/uv/) -### Quickstart +Install and prepare the base autoresearch environment: ```bash -# 1. Install dependencies, including the pinned ttt-discover dependency +# 1. Install dependencies uv sync -# 2. Prepare data and tokenizer once +# 2. Download data and train the tokenizer uv run prepare.py -# 3. Launch TTT-Discover outer-loop RL +# 3. Sanity check the original inner loop +uv run train.py +``` + +Then launch the outer TTT-Discover loop: + +```bash uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch.yaml ``` -The default outer model target is `Qwen/Qwen3.5-35B-A3B`. To swap models, edit one field in [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml): +## Default Configuration + +The default config lives at [configs/ttt_discover_autoresearch.yaml](configs/ttt_discover_autoresearch.yaml). 
+ +Current defaults: + +- `model_name: Qwen/Qwen3.5-35B-A3B` +- `samples_per_step: 8` +- `max_steps: 8` +- `temperature: 1.0` +- `max_concurrent_evaluations: 1` + +That means the RL loop samples grouped candidates for the upstream TTT recipe, but only one full inner autoresearch training run executes at a time on the local machine. + +## Model and Renderer Configuration + +The model is configurable, but the prompt/response format must match a supported renderer. + +Known-good renderer values: + +- `qwen3` +- `qwen3_instruct` +- `gpt_oss_no_sysprompt` +- `gpt_oss_low_reasoning` +- `gpt_oss_medium_reasoning` +- `gpt_oss_high_reasoning` + +Examples: ```yaml model_name: Qwen/Qwen3.5-35B-A3B +renderer_name: qwen3 ``` -For example: - ```yaml model_name: openai/gpt-oss-120b +renderer_name: gpt_oss_high_reasoning ``` -Outputs are written under `runs//`: +If you use an unknown model family, you should set `renderer_name` explicitly. The config now fails fast if it cannot infer a compatible renderer. -- `baseline.json` for the original `train.py` -- `history.jsonl` for every accepted and rejected candidate -- `best/train.py` for the best discovered replacement -- `best/metrics.json` for the best run metadata -- `candidates/` for per-candidate isolated workspaces and logs +## Output Artifacts -### Config notes +Each run writes artifacts under `runs//`: -- `model_name` is configurable, but the prompt/response format still needs a compatible `renderer_name`. -- Known-good renderer names are `qwen3`, `qwen3_instruct`, `gpt_oss_no_sysprompt`, `gpt_oss_low_reasoning`, `gpt_oss_medium_reasoning`, and `gpt_oss_high_reasoning`. -- For unknown model families, set both `model_name` and `renderer_name` explicitly or startup will fail fast. -- `provider` and `api_base` can be set in the YAML or overridden on the CLI. -- `baseline_command_override` and `candidate_command_override` let you swap the execution command without changing code. 
-- `max_concurrent_evaluations` defaults to `1` so grouped rollouts do not launch multiple full `train.py` jobs onto the same GPU at once. -- `run_ttt_discover.py` uses the upstream `discover` trainer stack directly, but bypasses the public `discover()` model-name guard so non-GPT-OSS models such as Qwen can be used without changing the RL optimization recipe. +- `baseline.json` + - baseline execution metadata for the original `train.py` +- `resolved_config.json` + - the fully resolved runtime config +- `history.jsonl` + - one line per evaluated candidate +- `best/train.py` + - the current best discovered inner-loop program +- `best/metrics.json` + - the best run metadata and metric +- `candidates/` + - isolated workspaces with stdout/stderr and per-candidate files +- `discover_log/` + - upstream sampler/checkpoint/log state from `ttt-discover` -## Project structure +## Inner Loop Assumptions -``` -prepare.py — constants, data prep + runtime utilities (do not modify) -train.py — model, optimizer, training loop (agent modifies this) -program.md — agent instructions -pyproject.toml — dependencies -run_ttt_discover.py — TTT-Discover entrypoint for outer-loop RL -ttt_autoresearch/ — thin autoresearch environment/reward adapter for discover -``` +This repo intentionally keeps the inner autoresearch setup small: + +- `prepare.py` remains fixed. +- `train.py` is the only file the outer model edits. +- Training still uses the original fixed wall-clock budget from autoresearch. +- `val_bpb` remains the optimization target because it is stable across vocabulary and architecture changes. + +## Design Choices + +### Why only `train.py`? -## Design choices +Because that matches the original autoresearch framing and keeps the action space bounded. It also makes it easier to attribute reward to specific inner-loop changes. -- **Single file to modify.** The agent only touches `train.py`. This keeps the scope manageable and diffs reviewable. 
-- **Fixed time budget.** Training always runs for exactly 5 minutes, regardless of your specific platform. This means you can expect approx 12 experiments/hour and approx 100 experiments while you sleep. There are two upsides of this design decision. First, this makes experiments directly comparable regardless of what the agent changes (model size, batch size, architecture, etc). Second, this means that autoresearch will find the most optimal model for your platform in that time budget. The downside is that your runs (and results) become not comparable to other people running on other compute platforms. -- **Self-contained.** No external dependencies beyond PyTorch and a few small packages. No distributed training, no complex configs. One GPU, one file, one metric. +### Why grouped rollouts? + +Because upstream `discover` uses grouped rollouts for its entropic advantage estimation and reuse behavior. This repo keeps that outer-loop recipe. + +### Why serialize inner evaluations? + +Because unlike some upstream `discover` tasks, each rollout here is an actual GPU training job. Running several `train.py` jobs concurrently on one GPU would distort the reward surface and often fail operationally. + +## Plain AutoResearch Mode Still Works + +This fork does not remove the original autoresearch workflow. You can still use it directly: + +```bash +uv run prepare.py +uv run train.py +``` -## Platform support +The TTT-Discover path is an additional outer loop, not a replacement for the inner codebase. -This code currently requires that you have a single NVIDIA GPU. In principle it is quite possible to support CPU, MPS and other platforms but this would also bloat the code. I'm not 100% sure that I want to take this on personally right now. People can reference (or have their agents reference) the full/parent nanochat repository that has wider platform support and shows the various solutions (e.g. 
a Flash Attention 3 kernels fallback implementation, generic device support, autodetection, etc.), feel free to create forks or discussions for other platforms and I'm happy to link to them here in the README in some new notable forks section or etc. +## Current Readiness -Seeing as there seems to be a lot of interest in tinkering with autoresearch on much smaller compute platforms than an H100, a few extra words. If you're going to try running autoresearch on smaller computers (Macbooks etc.), I'd recommend one of the forks below. On top of this, here are some recommendations for how to tune the defaults for much smaller models for aspiring forks: +What is tested locally: -1. To get half-decent results I'd use a dataset with a lot less entropy, e.g. this [TinyStories dataset](https://huggingface.co/datasets/karpathy/tinystories-gpt4-clean). These are GPT-4 generated short stories. Because the data is a lot narrower in scope, you will see reasonable results with a lot smaller models (if you try to sample from them after training). -2. You might experiment with decreasing `vocab_size`, e.g. from 8192 down to 4096, 2048, 1024, or even - simply byte-level tokenizer with 256 possibly bytes after utf-8 encoding. -3. In `prepare.py`, you'll want to lower `MAX_SEQ_LEN` a lot, depending on the computer even down to 256 etc. As you lower `MAX_SEQ_LEN`, you may want to experiment with increasing `DEVICE_BATCH_SIZE` in `train.py` slightly to compensate. The number of tokens per fwd/bwd pass is the product of these two. -4. Also in `prepare.py`, you'll want to decrease `EVAL_TOKENS` so that your validation loss is evaluated on a lot less data. -5. In `train.py`, the primary single knob that controls model complexity is the `DEPTH` (default 8, here). A lot of variables are just functions of this, so e.g. lower it down to e.g. 4. -6. 
You'll want to most likely use `WINDOW_PATTERN` of just "L", because "SSSL" uses alternating banded attention pattern that may be very inefficient for you. Try it. -7. You'll want to lower `TOTAL_BATCH_SIZE` a lot, but keep it powers of 2, e.g. down to `2**14` (~16K) or so even, hard to tell. +- config loading and override behavior +- reward mapping +- candidate parsing +- environment prompt and state flow +- CLI wiring into upstream `discover` +- serialization of inner evaluations -I think these would be the reasonable hyperparameters to play with. Ask your favorite coding agent for help and copy paste them this guide, as well as the full source code. +What is still environment-dependent: -## Notable forks +- a true end-to-end production run on the target Linux/CUDA machine +- provider-specific model serving details +- real-world throughput and stability under long TTT sessions -- [miolini/autoresearch-macos](https://github.com/miolini/autoresearch-macos) (MacOS) -- [trevin-creator/autoresearch-mlx](https://github.com/trevin-creator/autoresearch-mlx) (MacOS) -- [jsegov/autoresearch-win-rtx](https://github.com/jsegov/autoresearch-win-rtx) (Windows) +So the repo is structurally ready for the intended setup, but final operational confidence still comes from a real GPU run on the target hardware. 
## License From 3a06a1883c3679cf0c738bd1eb38f2f2ecd57d55 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Tue, 10 Mar 2026 14:42:28 +1100 Subject: [PATCH 06/17] Adopt 8x8x50 TTT defaults and document scaling costs --- README.md | 167 +++++++++++++++++++++++-- configs/ttt_discover_autoresearch.yaml | 4 +- tests/test_cli_integration.py | 2 + tests/test_reward.py | 12 ++ tests/test_runner.py | 11 ++ ttt_autoresearch/cli.py | 2 +- ttt_autoresearch/config.py | 12 +- ttt_autoresearch/reward.py | 25 +++- ttt_autoresearch/runner.py | 5 + 9 files changed, 228 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 1c23170c..238f81a9 100644 --- a/README.md +++ b/README.md @@ -66,14 +66,16 @@ Important details: - The **action** is the full replacement contents of `train.py`. - The **reward** is the inner-loop metric outcome, not the patch text. - The implementation keeps grouped rollouts for the upstream entropic advantage recipe. -- Inner evaluations are serialized by default with `max_concurrent_evaluations: 1` so multiple full training jobs do not fight over the same GPU. +- The default config is now paper-shaped: `8 groups x 8 rollouts x 50 steps`. +- In this repo, groups are controlled by `groups_per_step` and rollouts within each group are controlled by `samples_per_step`. +- The checked-in default keeps `max_concurrent_evaluations: 1` for safety; to scale on rented hardware, you raise concurrency and declare explicit `gpu_devices`. 
## Quick Start

 **Requirements**

 - Linux
-- A single NVIDIA GPU
+- NVIDIA GPUs
 - Python 3.11+
 - [uv](https://docs.astral.sh/uv/)

@@ -103,12 +105,161 @@ The default config lives at [configs/ttt_discover_autoresearch.yaml](configs/ttt

 Current defaults:

 - `model_name: Qwen/Qwen3.5-35B-A3B`
+- `groups_per_step: 8`
 - `samples_per_step: 8`
-- `max_steps: 8`
+- `max_steps: 50`
 - `temperature: 1.0`
 - `max_concurrent_evaluations: 1`

-That means the RL loop samples grouped candidates for the upstream TTT recipe, but only one full inner autoresearch training run executes at a time on the local machine.
+That means the default run is:
+
+- `8 groups`
+- `8 rollouts per group`
+- `64 total inner evaluations per step`
+- `50 outer RL steps`
+- but only `1` inner evaluation runs at a time unless you explicitly provision more GPUs
+
+This keeps the paper-shaped RL structure while remaining safe to launch on limited hardware.
+
+## Recommended Hardware
+
+If your goal is to match the spirit of the original autoresearch setup and push toward the best `val_bpb`, the inner loop should run on **H100 80GB** class GPUs.
+
+Why:
+
+- [train.py](train.py) uses Hopper-specific FA3 kernels when available.
+- [program.md](program.md) shows representative peak VRAM around `45 GB`.
+- `A100 40GB` is therefore not sufficient.
+
+Recommended inner-loop rental target:
+
+- **Best cost/performance:** H100 PCIe 80GB
+- **Best absolute performance:** H100 SXM 80GB
+
+For the paper-shaped default config, the natural operational target is:
+
+- **64 H100 80GB GPUs** for inner evaluations
+- one rollout per GPU
+- one full outer step in roughly one inner-training wave
+
+If you have fewer GPUs, the run still works, but each outer step takes multiple waves.
+To use more than one GPU safely, you should set: + +```yaml +max_concurrent_evaluations: 64 +gpu_devices: ["0", "1", "2", "3", "..."] +``` + +The runner now pins each candidate subprocess to one configured `CUDA_VISIBLE_DEVICES` slot. + +## Cost Model + +There are two separate cost buckets: + +1. **Inner-loop GPU rental** + - pays for the actual `train.py` runs + - this dominates total cost in this repo + +2. **Outer-loop Tinker cost** + - pays for model prefill, sampling, and RL training tokens + - this is comparatively small here because the inner rollouts are expensive + +### Tinker Cost + +Using the official Tinker pricing for `Qwen/Qwen3.5-35B-A3B`: + +- prefill: `$0.36 / 1M tokens` +- sample: `$0.89 / 1M tokens` +- train: `$1.07 / 1M tokens` + +And using this repo's actual prompt/output sizes, a practical estimate is: + +- about **`$0.017-$0.024` per rollout** +- about **`$0.020` per rollout** as a reasonable midpoint + +So for the default paper-shaped config: + +- `8 x 8 x 50 = 3200 total rollouts` +- estimated Tinker cost: about **`$54-$77`** +- midpoint estimate: about **`$65`** + +### GPU Rental Cost + +Using H100 PCIe 80GB pricing of roughly **`$2.86 / GPU / hour`**, and assuming one inner rollout takes roughly `325.9s` end to end: + +- each rollout costs about **`$0.259`** in GPU rental +- `3200` rollouts costs about **`$829`** in GPU rental + +That means a fully provisioned `8 x 8 x 50` run is roughly: + +- **GPU rental:** about `$829` +- **Tinker:** about `$65` +- **Total:** about **`$894`** + +This is directionally consistent with the TTT-Discover paper's statement that runs cost a few hundred dollars to several hundred dollars per problem, with this repo skewing more expensive on the inner loop because each rollout is a real GPU training job. + +### Cost Distribution + +For this repo, the cost split is roughly: + +- **~90% GPU rental** +- **~10% Tinker** + +That is the opposite of many lightweight code-generation settings. 
Here, the expensive part is the real autoresearch evaluation. + +## How I Recommend Running It + +### If you want the paper-shaped run + +Use the paper-shaped structure and rent: + +- **64x H100 PCIe 80GB** + +Set: + +```yaml +groups_per_step: 8 +samples_per_step: 8 +max_steps: 50 +max_concurrent_evaluations: 64 +gpu_devices: ["0", "1", "2", ..., "63"] +``` + +This gives: + +- `8 groups x 8 rollouts` +- one GPU per rollout +- about one rollout wave per step +- wall-clock of roughly `50 x 5.4 minutes`, plus overhead + +This is the closest clean operational match to the repo default. + +### If you want a cheaper but still strong run + +Use: + +- `groups_per_step: 8` +- `samples_per_step: 8` +- `max_steps: 8` to `16` +- `max_concurrent_evaluations` equal to however many GPUs you actually rented + +This preserves the paper-like group structure while cutting total spend materially. + +### If you only have one GPU + +The checked-in config is already safe in the sense that it runs with one evaluation slot, but it will be extremely slow at full `8 x 8 x 50`. + +Instead reduce to something like: + +```yaml +groups_per_step: 1 +samples_per_step: 8 +max_steps: 8 +max_concurrent_evaluations: 1 +gpu_devices: null +``` + +That is much slower and less faithful to the paper, but operationally sane on one machine. ## Model and Renderer Configuration @@ -158,7 +309,7 @@ Each run writes artifacts under `runs//`: ## Inner Loop Assumptions -This repo intentionally keeps the inner autoresearch setup small: +This repo intentionally keeps the inner autoresearch target small even though the outer RL setup can be large: - `prepare.py` remains fixed. - `train.py` is the only file the outer model edits. @@ -175,9 +326,9 @@ Because that matches the original autoresearch framing and keeps the action spac Because upstream `discover` uses grouped rollouts for its entropic advantage estimation and reuse behavior. This repo keeps that outer-loop recipe. -### Why serialize inner evaluations? 
+### Why allow large concurrent inner evaluation now? -Because unlike some upstream `discover` tasks, each rollout here is an actual GPU training job. Running several `train.py` jobs concurrently on one GPU would distort the reward surface and often fail operationally. +Because the default configuration is no longer targeting a single local GPU. It is targeting rented multi-GPU execution where one rollout can be assigned to one GPU, which restores fair rollout timing and keeps the paper-like grouped rollout structure. ## Plain AutoResearch Mode Still Works @@ -199,7 +350,7 @@ What is tested locally: - candidate parsing - environment prompt and state flow - CLI wiring into upstream `discover` -- serialization of inner evaluations +- concurrency gating for inner evaluations What is still environment-dependent: diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index 87f6f1dc..b69dbe76 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,7 +1,8 @@ model_name: Qwen/Qwen3.5-35B-A3B provider: null api_base: null -max_steps: 8 +max_steps: 50 +groups_per_step: 8 samples_per_step: 8 temperature: 1.0 timeout_sec: 2700 @@ -22,3 +23,4 @@ eval_timeout: 2700 local_model_path: null keep_history: 6 max_concurrent_evaluations: 1 +gpu_devices: null diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py index a919dc6a..28714d17 100644 --- a/tests/test_cli_integration.py +++ b/tests/test_cli_integration.py @@ -86,6 +86,7 @@ async def fake_discover_main(cfg): "model_name: Qwen/Qwen3.5-35B-A3B", f"run_dir: {run_dir}", "max_steps: 3", + "groups_per_step: 3", "samples_per_step: 2", "baseline_command_override:", f" - {sys.executable}", @@ -110,6 +111,7 @@ async def fake_discover_main(cfg): self.assertTrue(captured.get("dataset_builder_called")) self.assertEqual(captured["rl_config"]["model_name"], "Qwen/Qwen3.5-35B-A3B") self.assertEqual(captured["rl_config"]["num_epochs"], 3) + 
self.assertEqual(captured["dataset_config"]["batch_size"], 3) self.assertEqual(captured["dataset_config"]["group_size"], 2) self.assertEqual(captured["dataset_config"]["problem_type"], "autoresearch") finally: diff --git a/tests/test_reward.py b/tests/test_reward.py index 4291a561..bcdb4a3d 100644 --- a/tests/test_reward.py +++ b/tests/test_reward.py @@ -149,6 +149,18 @@ def make_state() -> AutoResearchState: self.assertEqual(runner.max_seen, 1) + def test_parallel_evaluations_require_explicit_gpu_devices(self) -> None: + bootstrap = BootstrapContext( + repo_root=Path("."), + run_dir=Path("."), + config=TTTAutoResearchConfig(max_concurrent_evaluations=2).normalized(Path(".")), + program_text="program", + baseline_train_py="train", + baseline_val_bpb=1.0, + ) + with self.assertRaises(ValueError): + AutoResearchRewardEvaluator.configure(bootstrap, object()) # type: ignore[arg-type] + if __name__ == "__main__": unittest.main() diff --git a/tests/test_runner.py b/tests/test_runner.py index 65b69885..fad5ea56 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -92,6 +92,17 @@ def test_unknown_model_requires_explicit_renderer(self) -> None: ).normalized(root) self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") + def test_group_defaults_reflect_paper_shaped_config(self) -> None: + config = TTTAutoResearchConfig().normalized(Path(".")) + self.assertEqual(config.max_steps, 50) + self.assertEqual(config.groups_per_step, 8) + self.assertEqual(config.samples_per_step, 8) + self.assertEqual(config.max_concurrent_evaluations, 1) + + def test_gpu_devices_are_normalized(self) -> None: + config = TTTAutoResearchConfig(gpu_devices=[0, 3, 7]).normalized(Path(".")) + self.assertEqual(config.gpu_devices, ["0", "3", "7"]) + if __name__ == "__main__": unittest.main() diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py index a31a8aaa..34559926 100644 --- a/ttt_autoresearch/cli.py +++ b/ttt_autoresearch/cli.py @@ -55,7 +55,7 @@ def main(argv: list[str] | 
None = None) -> int: dataset_config = DatasetConfig( env_type=AutoResearchDiscoverEnv, problem_type="autoresearch", - batch_size=1, + batch_size=config.groups_per_step, group_size=config.samples_per_step, model_name_for_tokenizer=config.local_model_path or config.model_name, renderer_name=config.renderer_name, diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 28920155..3367001e 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -25,7 +25,8 @@ class TTTAutoResearchConfig: model_name: str = "Qwen/Qwen3.5-35B-A3B" provider: str | None = None api_base: str | None = None - max_steps: int = 8 + max_steps: int = 50 + groups_per_step: int = 8 samples_per_step: int = 8 temperature: float = 1.0 timeout_sec: int = 2700 @@ -46,6 +47,7 @@ class TTTAutoResearchConfig: local_model_path: str | None = None keep_history: int = 6 max_concurrent_evaluations: int = 1 + gpu_devices: list[str] | None = None def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": run_dir = _resolve_path(self.run_dir, repo_root) if self.run_dir else repo_root / "runs" / datetime.now().strftime("%Y%m%d_%H%M%S") @@ -55,6 +57,7 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": provider=self.provider, api_base=self.api_base, max_steps=self.max_steps, + groups_per_step=max(1, int(self.groups_per_step)), samples_per_step=self.samples_per_step, temperature=self.temperature, timeout_sec=self.timeout_sec, @@ -75,6 +78,7 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": local_model_path=_resolve_optional_path_str(self.local_model_path, repo_root), keep_history=self.keep_history, max_concurrent_evaluations=max(1, int(self.max_concurrent_evaluations)), + gpu_devices=_normalize_string_list(self.gpu_devices), ) def to_dict(self) -> dict[str, Any]: @@ -165,6 +169,12 @@ def _normalize_command(command: list[str] | str | None) -> list[str] | None: return [str(part) for part in command] +def _normalize_string_list(values: 
list[str] | list[int] | tuple[str, ...] | tuple[int, ...] | None) -> list[str] | None: + if values is None: + return None + return [str(value) for value in values] + + def _resolve_path(path_value: str | os.PathLike[str], repo_root: Path) -> Path: path = Path(path_value).expanduser() if path.is_absolute(): diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py index e80c8507..d33f30dc 100644 --- a/ttt_autoresearch/reward.py +++ b/ttt_autoresearch/reward.py @@ -1,6 +1,7 @@ from __future__ import annotations from pathlib import Path +import queue import threading from typing import Any @@ -11,6 +12,7 @@ _ARTIFACT_LOCK = threading.Lock() _EVALUATION_SLOTS: threading.BoundedSemaphore | None = None +_GPU_DEVICE_QUEUE: queue.Queue[str] | None = None def reward_for_result(current_best_val_bpb: float, result: RunResult) -> tuple[float, float]: @@ -31,10 +33,25 @@ class AutoResearchRewardEvaluator(BaseRewardEvaluator): @classmethod def configure(cls, bootstrap: BootstrapContext, runner: AutoResearchRunner) -> None: - global _EVALUATION_SLOTS + global _EVALUATION_SLOTS, _GPU_DEVICE_QUEUE cls.bootstrap = bootstrap cls.runner = runner _EVALUATION_SLOTS = threading.BoundedSemaphore(bootstrap.config.max_concurrent_evaluations) + _GPU_DEVICE_QUEUE = None + gpu_devices = bootstrap.config.gpu_devices or [] + if bootstrap.config.max_concurrent_evaluations > 1: + if not gpu_devices: + raise ValueError( + "max_concurrent_evaluations > 1 requires gpu_devices to be set so candidate runs can be pinned to distinct GPUs." + ) + if bootstrap.config.max_concurrent_evaluations > len(gpu_devices): + raise ValueError( + "max_concurrent_evaluations cannot exceed the number of configured gpu_devices." 
+ ) + if gpu_devices: + _GPU_DEVICE_QUEUE = queue.Queue() + for gpu_device in gpu_devices: + _GPU_DEVICE_QUEUE.put(gpu_device) def __init__(self, *args: Any, **kwargs: Any) -> None: self.problem_type = kwargs.get("problem_type", "autoresearch") @@ -117,14 +134,20 @@ def _run_candidate(self, candidate: PatchCandidate, state: Any) -> RunResult: # Grouped rollouts stay enabled for the upstream entropic advantage recipe, # but inner autoresearch training runs must be serialized on a single GPU. _EVALUATION_SLOTS.acquire() + gpu_device: str | None = None try: + if _GPU_DEVICE_QUEUE is not None: + gpu_device = _GPU_DEVICE_QUEUE.get() return self.runner.run_candidate( bootstrap=self.bootstrap, candidate=candidate, step=getattr(state, "timestep", -1) + 1, state_id=getattr(state, "id", "unknown"), + gpu_device=gpu_device, ) finally: + if _GPU_DEVICE_QUEUE is not None and gpu_device is not None: + _GPU_DEVICE_QUEUE.put(gpu_device) _EVALUATION_SLOTS.release() @staticmethod diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index ccc0faa6..0fd7ef19 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -117,6 +117,7 @@ def run_candidate( candidate: PatchCandidate, step: int, state_id: str, + gpu_device: str | None = None, ) -> RunResult: workspace = self.run_dir / "candidates" / f"{step:04d}_{uuid.uuid4().hex[:8]}" self._copy_repo(workspace) @@ -127,6 +128,7 @@ def run_candidate( bootstrap=bootstrap, label=f"candidate-{step:04d}", state_id=state_id, + gpu_device=gpu_device, ) return result @@ -187,9 +189,12 @@ def _execute_workspace( bootstrap: BootstrapContext | None, label: str, state_id: str | None = None, + gpu_device: str | None = None, ) -> RunResult: command = self._resolve_command(command_template, workspace, bootstrap, label, state_id) env = bootstrap.subprocess_env() if bootstrap else dict(os.environ) + if gpu_device is not None: + env["CUDA_VISIBLE_DEVICES"] = gpu_device stdout_path = workspace / "stdout.log" stderr_path = 
workspace / "stderr.log" metrics_path = workspace / "metrics.json" From 78a55c9b88be186070210616e8c0f07ce58898f5 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Tue, 10 Mar 2026 15:24:14 +1100 Subject: [PATCH 07/17] Adopt practical TTT presets and docs --- README.md | 303 ++++++++---------- configs/ttt_discover_autoresearch.yaml | 4 +- configs/ttt_discover_autoresearch_large.yaml | 26 ++ configs/ttt_discover_autoresearch_medium.yaml | 26 ++ configs/ttt_discover_autoresearch_small.yaml | 26 ++ tests/test_runner.py | 6 +- ttt_autoresearch/config.py | 4 +- 7 files changed, 219 insertions(+), 176 deletions(-) create mode 100644 configs/ttt_discover_autoresearch_large.yaml create mode 100644 configs/ttt_discover_autoresearch_medium.yaml create mode 100644 configs/ttt_discover_autoresearch_small.yaml diff --git a/README.md b/README.md index 238f81a9..40f38446 100644 --- a/README.md +++ b/README.md @@ -2,41 +2,40 @@ ![teaser](progress.png) -This repo is a focused fork of [karpathy/autoresearch](https://github.com/karpathy/autoresearch) that replaces the ad hoc outer experimentation loop with [TTT-Discover](https://github.com/test-time-training/discover). +This repo is a focused fork of [karpathy/autoresearch](https://github.com/karpathy/autoresearch) that replaces the outer experiment loop with [TTT-Discover](https://github.com/test-time-training/discover). -The core idea is: +The setup is: -- The **inner loop** is still `autoresearch`: edit `train.py`, run a fixed-budget training job, measure `val_bpb`. -- The **outer loop** is now **test-time RL** from TTT-Discover. -- The outer model proposes full replacements for `train.py`. -- The resulting inner-loop metric improvement becomes the reward used to update the outer model online. +- The **inner loop** is still AutoResearch: edit [`train.py`](train.py), run a fixed-budget training job, and measure `val_bpb`. 
+- The **outer loop** is TTT-Discover: the model proposes full replacements for `train.py`, sees the resulting metric, and is reinforced online from that reward. +- The reward is strictly the inner-loop outcome: `current_best_val_bpb - candidate_val_bpb`. -This keeps the original spirit of autoresearch, but makes the search policy itself train during the run. +This fork keeps the original AutoResearch target and uses TTT-Discover as the policy improvement layer. ## Credits -This project is derived from: +This project builds on: - [karpathy/autoresearch](https://github.com/karpathy/autoresearch) - [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175) - [test-time-training/discover](https://github.com/test-time-training/discover) -The RL optimization recipe is intended to stay with upstream `discover`; this repo mainly provides the autoresearch-specific environment, reward, runner, and usage wrapper. +The RL recipe stays with upstream `discover`. This repo provides the AutoResearch-specific environment, reward, runner, configs, and practical launch workflow. -## What This Repo Does +## What This Repo Optimizes The repo has two layers: 1. **Inner optimization target** - - `prepare.py` downloads data and trains the tokenizer. - - `train.py` is the only file the outer model edits. + - [`prepare.py`](prepare.py) downloads data and trains the tokenizer. + - [`train.py`](train.py) is the only file the outer model edits. - `val_bpb` is the optimization metric. Lower is better. 2. **Outer TTT-Discover loop** - - `run_ttt_discover.py` launches the test-time RL run. - - `ttt_autoresearch/` adapts autoresearch to the `discover` environment interface. + - [`run_ttt_discover.py`](run_ttt_discover.py) launches the run. + - [`ttt_autoresearch/`](ttt_autoresearch/) adapts AutoResearch to the `discover` environment interface. - Each candidate `train.py` is executed in an isolated workspace. - - Reward is computed from `current_best_val_bpb - candidate_val_bpb`. 
+ - Reward is computed from the measured improvement over the current best state. ## Repository Layout @@ -46,7 +45,7 @@ train.py Inner training program edited by the outer model program.md Human-authored research instructions/context run_ttt_discover.py Main TTT-Discover entrypoint ttt_autoresearch/ Adapter layer for environment, reward, runner, config -configs/ Ready-to-run YAML config +configs/ Practical preset YAML configs tests/ Smoke and unit coverage for the adapter ``` @@ -54,9 +53,9 @@ tests/ Smoke and unit coverage for the adapter At each outer-loop step: -1. TTT-Discover samples a group of candidate `train.py` replacements. -2. Each candidate is evaluated by running a real autoresearch training job. -3. The resulting `val_bpb` is parsed from the run logs. +1. TTT-Discover samples grouped candidate replacements for `train.py`. +2. Each candidate is evaluated by running a real AutoResearch training job. +3. The run logs are parsed for `val_bpb`. 4. Reward is computed from improvement over the current best state. 5. Upstream `discover` performs the online RL update. 6. If a candidate improves `val_bpb`, it becomes the new best `train.py`. @@ -64,11 +63,10 @@ At each outer-loop step: Important details: - The **action** is the full replacement contents of `train.py`. -- The **reward** is the inner-loop metric outcome, not the patch text. -- The implementation keeps grouped rollouts for the upstream entropic advantage recipe. -- The default config is now paper-shaped: `8 groups x 8 rollouts x 50 steps`. -- In this repo, groups are controlled by `groups_per_step` and rollouts within each group are controlled by `samples_per_step`. -- The checked-in default keeps `max_concurrent_evaluations: 1` for safety; to scale on rented hardware, you raise concurrency and declare explicit `gpu_devices`. +- The **reward** is the inner-loop metric outcome, not a heuristic about the patch text. +- `groups_per_step` controls how many rollout groups are sampled at each RL step. 
+- `samples_per_step` controls how many rollouts are sampled inside each group. +- `max_concurrent_evaluations` controls how many expensive inner `train.py` jobs may run at once. ## Quick Start @@ -79,191 +77,188 @@ Important details: - Python 3.11+ - [uv](https://docs.astral.sh/uv/) -Install and prepare the base autoresearch environment: +Install and prepare the base AutoResearch environment: ```bash -# 1. Install dependencies uv sync - -# 2. Download data and train the tokenizer uv run prepare.py - -# 3. Sanity check the original inner loop uv run train.py ``` -Then launch the outer TTT-Discover loop: +Then launch the default practical TTT-Discover mode: ```bash uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch.yaml ``` -## Default Configuration +## Training Presets + +This repo now ships with three practical presets instead of a paper-scale default. + +### Small + +File: [`configs/ttt_discover_autoresearch_small.yaml`](configs/ttt_discover_autoresearch_small.yaml) + +- `groups_per_step: 2` +- `samples_per_step: 4` +- `max_steps: 12` +- total evaluations: `96` -The default config lives at [configs/ttt_discover_autoresearch.yaml](configs/ttt_discover_autoresearch.yaml). +Use this when: -Current defaults: +- you want a quick sanity run +- you are testing a new model backend +- you are on a single GPU and want something that finishes in a reasonable time -- `model_name: Qwen/Qwen3.5-35B-A3B` -- `groups_per_step: 8` +### Medium + +File: [`configs/ttt_discover_autoresearch_medium.yaml`](configs/ttt_discover_autoresearch_medium.yaml) + +- `groups_per_step: 2` - `samples_per_step: 8` -- `max_steps: 50` -- `temperature: 1.0` -- `max_concurrent_evaluations: 1` +- `max_steps: 12` +- total evaluations: `192` -That means the default run is: +This is the **recommended main mode** for the repo. 
-- `8 groups` -- `8 rollouts per group` -- `64 total inner evaluations per step` -- `50 outer RL steps` -- but only `1` inner evaluation runs at a time unless you explicitly provision more GPUs +It is also the checked-in default at [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml). -This keeps the paper-shaped RL structure while remaining safe to launch on limited hardware. +### Large -## Recommended Hardware +File: [`configs/ttt_discover_autoresearch_large.yaml`](configs/ttt_discover_autoresearch_large.yaml) -If your goal is to match the spirit of the original autoresearch setup and push toward the best `val_bpb`, the inner loop should run on **H100 80GB** class GPUs. +- `groups_per_step: 2` +- `samples_per_step: 8` +- `max_steps: 20` +- total evaluations: `320` -Why: +Use this when the medium run is already stable and you want more policy updates without moving into paper-scale compute. -- [train.py](/Users/aumdesai/AutoResearch-Discover/train.py) uses Hopper-specific FA3 kernels when available. -- [program.md](/Users/aumdesai/AutoResearch-Discover/program.md) shows representative peak VRAM around `45 GB`. -- `A100 40GB` is therefore not sufficient. +## Recommended Modes -Recommended inner-loop rental target: +For this fork, the most realistic settings are: -- **Best cost/performance:** H100 PCIe 80GB -- **Best absolute performance:** H100 SXM 80GB +- **Small:** `2 x 4 x 12` +- **Medium:** `2 x 8 x 12` +- **Large:** `2 x 8 x 20` -For the paper-shaped default config, the natural operational target is: +These are intentionally sized around the practical AutoResearch regime, where each rollout is a real GPU training job. They keep grouped rollouts and online RL updates from TTT-Discover, but avoid the extreme compute profile of the original paper. 
-- **64 H100 80GB GPUs** for inner evaluations -- one rollout per GPU -- one full outer step in roughly one inner-training wave +## Hardware Recommendation -If you have fewer GPUs, the run still works, but each outer step takes multiple waves. -To use more than one GPU safely, you should set: +If your goal is to push `val_bpb` seriously, the inner loop should run on **H100 80GB** class GPUs. -```yaml -max_concurrent_evaluations: 64 -gpu_devices: ["0", "1", "2", "3", "..."] -``` +Why: -The runner now pins each candidate subprocess to one configured `CUDA_VISIBLE_DEVICES` slot. +- [`train.py`](train.py) uses Hopper-specific FA3 kernels when available. +- [`program.md`](program.md) shows representative peak VRAM around `45 GB`. +- `A100 40GB` is therefore not viable for the intended setup. -## Cost Model +Recommended inner-loop target: -There are two separate cost buckets: +- **Best cost/performance:** H100 PCIe 80GB +- **Best absolute performance:** H100 SXM 80GB -1. **Inner-loop GPU rental** - - pays for the actual `train.py` runs - - this dominates total cost in this repo +For these practical presets, I recommend: -2. **Outer-loop Tinker cost** - - pays for model prefill, sampling, and RL training tokens - - this is comparatively small here because the inner rollouts are expensive +- **Small (`2x4x12`)**: rent `8x H100 80GB` +- **Medium (`2x8x12`)**: rent `16x H100 80GB` +- **Large (`2x8x20`)**: rent `16x H100 80GB` -### Tinker Cost +That gives one GPU per rollout in a step wave. If you rent fewer GPUs, the run still works, but each step is split into multiple waves and takes longer. 
-Using the official Tinker pricing for `Qwen/Qwen3.5-35B-A3B`: +To run with rented GPUs, set: -- prefill: `$0.36 / 1M tokens` -- sample: `$0.89 / 1M tokens` -- train: `$1.07 / 1M tokens` +```yaml +max_concurrent_evaluations: 16 +gpu_devices: ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15"] +``` -And using this repo's actual prompt/output sizes, a practical estimate is: +The runner pins each candidate subprocess to one configured `CUDA_VISIBLE_DEVICES` slot. -- about **`$0.017-$0.024` per rollout** -- about **`$0.020` per rollout** as a reasonable midpoint +## Cost Model -So for the default paper-shaped config: +There are two cost buckets: -- `8 x 8 x 50 = 3200 total rollouts` -- estimated Tinker cost: about **`$54-$77`** -- midpoint estimate: about **`$65`** +1. **Inner-loop GPU rental** + - pays for the real `train.py` runs + - dominates total cost in this repo -### GPU Rental Cost +2. **Outer-loop Tinker cost** + - pays for prompt prefill, sampling, and RL training tokens + - is much smaller than the inner-loop GPU cost here -Using H100 PCIe 80GB pricing of roughly **`$2.86 / GPU / hour`**, and assuming one inner rollout takes roughly `325.9s` end to end: +### Cost Assumptions -- each rollout costs about **`$0.259`** in GPU rental -- `3200` rollouts costs about **`$829`** in GPU rental +The estimates below use: -That means a fully provisioned `8 x 8 x 50` run is roughly: +- `Qwen/Qwen3.5-35B-A3B` on Tinker +- H100 PCIe 80GB at about `$2.86 / GPU / hour` +- about `325.9s` per inner rollout +- about `$0.020` Tinker cost per rollout as a practical midpoint for this repo -- **GPU rental:** about `$829` -- **Tinker:** about `$65` -- **Total:** about **`$894`** +### Preset Cost Estimates -This is directionally consistent with the TTT-Discover paper's statement that runs cost a few hundred dollars to several hundred dollars per problem, with this repo skewing more expensive on the inner loop because each rollout is a real GPU 
training job. +| Mode | Shape | Total evals | GPU rental | Tinker | Total | +|---|---:|---:|---:|---:|---:| +| Small | `2x4x12` | 96 | about `$25` | about `$1.9` | about `$27` | +| Medium | `2x8x12` | 192 | about `$50` | about `$3.8` | about `$54` | +| Large | `2x8x20` | 320 | about `$83` | about `$6.4` | about `$89` | ### Cost Distribution -For this repo, the cost split is roughly: +For these realistic runs, the cost split is still roughly: -- **~90% GPU rental** -- **~10% Tinker** +- **~92% GPU rental** +- **~8% Tinker** -That is the opposite of many lightweight code-generation settings. Here, the expensive part is the real autoresearch evaluation. +That is the core difference between this repo and cheaper code-generation tasks: each rollout is a real training job. ## How I Recommend Running It -### If you want the paper-shaped run - -Use the paper-shaped structure and rent: - -- **64x H100 PCIe 80GB** +### Single GPU -Set: +Use the small preset, and keep evaluation serialized: ```yaml -groups_per_step: 8 -samples_per_step: 8 -max_steps: 50 -max_concurrent_evaluations: 64 -gpu_devices: ["0", "1", "2", ..., "63"] +groups_per_step: 2 +samples_per_step: 4 +max_steps: 12 +max_concurrent_evaluations: 1 +gpu_devices: null ``` -This gives: - -- `8 groups x 8 rollouts` -- one GPU per rollout -- about one rollout wave per step -- wall-clock of roughly `50 x 5.4 minutes`, plus overhead +This is the safest way to stay close to the original one-GPU AutoResearch style while still using the TTT-Discover framework. -This is the closest clean operational match to the repo default. 
+### Practical Rented Run -### If you want a cheaper but still strong run +Use the medium preset: -Use: +```bash +uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch_medium.yaml +``` -- `groups_per_step: 8` -- `samples_per_step: 8` -- `max_steps: 8` to `16` -- `max_concurrent_evaluations` equal to however many GPUs you actually rented +Recommended provisioning: -This preserves the paper-like group structure while cutting total spend materially. +- `16x H100 PCIe 80GB` +- `max_concurrent_evaluations: 16` +- `gpu_devices` set to the visible devices on the host -### If you only have one GPU +This is the main mode I recommend if your goal is to beat the baseline without exploding compute. -The checked-in config is already safe in the sense that it runs with one evaluation slot, but it will be extremely slow at full `8 x 8 x 50`. +### Larger Budget Run -Instead reduce to something like: +Use the large preset: -```yaml -groups_per_step: 1 -samples_per_step: 8 -max_steps: 8 -max_concurrent_evaluations: 1 -gpu_devices: null +```bash +uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch_large.yaml ``` -That is much slower and less faithful to the paper, but operationally sane on one machine. +This keeps the same grouped structure as medium, but increases the number of RL updates from `12` to `20`. ## Model and Renderer Configuration -The model is configurable, but the prompt/response format must match a supported renderer. +The model is configurable, but the prompt and response format must match a supported renderer. Known-good renderer values: @@ -286,53 +281,23 @@ model_name: openai/gpt-oss-120b renderer_name: gpt_oss_high_reasoning ``` -If you use an unknown model family, you should set `renderer_name` explicitly. The config now fails fast if it cannot infer a compatible renderer. +If you use an unknown model family, set `renderer_name` explicitly. The config fails fast if it cannot infer a compatible renderer. 
## Output Artifacts Each run writes artifacts under `runs//`: - `baseline.json` - - baseline execution metadata for the original `train.py` - `resolved_config.json` - - the fully resolved runtime config - `history.jsonl` - - one line per evaluated candidate - `best/train.py` - - the current best discovered inner-loop program - `best/metrics.json` - - the best run metadata and metric - `candidates/` - - isolated workspaces with stdout/stderr and per-candidate files - `discover_log/` - - upstream sampler/checkpoint/log state from `ttt-discover` - -## Inner Loop Assumptions - -This repo intentionally keeps the inner autoresearch target small even though the outer RL setup can be large: - -- `prepare.py` remains fixed. -- `train.py` is the only file the outer model edits. -- Training still uses the original fixed wall-clock budget from autoresearch. -- `val_bpb` remains the optimization target because it is stable across vocabulary and architecture changes. - -## Design Choices - -### Why only `train.py`? - -Because that matches the original autoresearch framing and keeps the action space bounded. It also makes it easier to attribute reward to specific inner-loop changes. - -### Why grouped rollouts? - -Because upstream `discover` uses grouped rollouts for its entropic advantage estimation and reuse behavior. This repo keeps that outer-loop recipe. - -### Why allow large concurrent inner evaluation now? - -Because the default configuration is no longer targeting a single local GPU. It is targeting rented multi-GPU execution where one rollout can be assigned to one GPU, which restores fair rollout timing and keeps the paper-like grouped rollout structure. ## Plain AutoResearch Mode Still Works -This fork does not remove the original autoresearch workflow. You can still use it directly: +This fork does not remove the original AutoResearch workflow. 
You can still use it directly: ```bash uv run prepare.py @@ -356,9 +321,9 @@ What is still environment-dependent: - a true end-to-end production run on the target Linux/CUDA machine - provider-specific model serving details -- real-world throughput and stability under long TTT sessions +- long-run throughput and stability on rented multi-GPU hardware -So the repo is structurally ready for the intended setup, but final operational confidence still comes from a real GPU run on the target hardware. +So the repo is structurally ready for the intended setup, but final operational confidence still comes from a real GPU run on target hardware. ## License diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index b69dbe76..7f52e381 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,8 +1,8 @@ model_name: Qwen/Qwen3.5-35B-A3B provider: null api_base: null -max_steps: 50 -groups_per_step: 8 +max_steps: 12 +groups_per_step: 2 samples_per_step: 8 temperature: 1.0 timeout_sec: 2700 diff --git a/configs/ttt_discover_autoresearch_large.yaml b/configs/ttt_discover_autoresearch_large.yaml new file mode 100644 index 00000000..03cd4069 --- /dev/null +++ b/configs/ttt_discover_autoresearch_large.yaml @@ -0,0 +1,26 @@ +model_name: Qwen/Qwen3.5-35B-A3B +provider: null +api_base: null +max_steps: 20 +groups_per_step: 2 +samples_per_step: 8 +temperature: 1.0 +timeout_sec: 2700 +run_dir: null +data_path: null +baseline_command_override: null +candidate_command_override: null +experiment_name: autoresearch-ttt-discover-large +renderer_name: null +learning_rate: 0.00004 +lora_rank: 32 +kl_penalty_coef: 0.1 +phase1_max_tokens: 26000 +save_every: 2 +wandb_project: autoresearch-ttt-discover +num_cpus_per_task: 0 +eval_timeout: 2700 +local_model_path: null +keep_history: 6 +max_concurrent_evaluations: 1 +gpu_devices: null diff --git a/configs/ttt_discover_autoresearch_medium.yaml 
b/configs/ttt_discover_autoresearch_medium.yaml new file mode 100644 index 00000000..9076247e --- /dev/null +++ b/configs/ttt_discover_autoresearch_medium.yaml @@ -0,0 +1,26 @@ +model_name: Qwen/Qwen3.5-35B-A3B +provider: null +api_base: null +max_steps: 12 +groups_per_step: 2 +samples_per_step: 8 +temperature: 1.0 +timeout_sec: 2700 +run_dir: null +data_path: null +baseline_command_override: null +candidate_command_override: null +experiment_name: autoresearch-ttt-discover-medium +renderer_name: null +learning_rate: 0.00004 +lora_rank: 32 +kl_penalty_coef: 0.1 +phase1_max_tokens: 26000 +save_every: 2 +wandb_project: autoresearch-ttt-discover +num_cpus_per_task: 0 +eval_timeout: 2700 +local_model_path: null +keep_history: 6 +max_concurrent_evaluations: 1 +gpu_devices: null diff --git a/configs/ttt_discover_autoresearch_small.yaml b/configs/ttt_discover_autoresearch_small.yaml new file mode 100644 index 00000000..641b22ee --- /dev/null +++ b/configs/ttt_discover_autoresearch_small.yaml @@ -0,0 +1,26 @@ +model_name: Qwen/Qwen3.5-35B-A3B +provider: null +api_base: null +max_steps: 12 +groups_per_step: 2 +samples_per_step: 4 +temperature: 1.0 +timeout_sec: 2700 +run_dir: null +data_path: null +baseline_command_override: null +candidate_command_override: null +experiment_name: autoresearch-ttt-discover-small +renderer_name: null +learning_rate: 0.00004 +lora_rank: 32 +kl_penalty_coef: 0.1 +phase1_max_tokens: 26000 +save_every: 2 +wandb_project: autoresearch-ttt-discover +num_cpus_per_task: 0 +eval_timeout: 2700 +local_model_path: null +keep_history: 6 +max_concurrent_evaluations: 1 +gpu_devices: null diff --git a/tests/test_runner.py b/tests/test_runner.py index fad5ea56..7ef1bc79 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -92,10 +92,10 @@ def test_unknown_model_requires_explicit_renderer(self) -> None: ).normalized(root) self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") - def test_group_defaults_reflect_paper_shaped_config(self) -> 
None: + def test_group_defaults_reflect_medium_preset(self) -> None: config = TTTAutoResearchConfig().normalized(Path(".")) - self.assertEqual(config.max_steps, 50) - self.assertEqual(config.groups_per_step, 8) + self.assertEqual(config.max_steps, 12) + self.assertEqual(config.groups_per_step, 2) self.assertEqual(config.samples_per_step, 8) self.assertEqual(config.max_concurrent_evaluations, 1) diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 3367001e..1ad826d7 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -25,8 +25,8 @@ class TTTAutoResearchConfig: model_name: str = "Qwen/Qwen3.5-35B-A3B" provider: str | None = None api_base: str | None = None - max_steps: int = 50 - groups_per_step: int = 8 + max_steps: int = 12 + groups_per_step: int = 2 samples_per_step: int = 8 temperature: float = 1.0 timeout_sec: int = 2700 From 5a914c41632b7586306729a59b655bfcc9da41ec Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Wed, 11 Mar 2026 09:23:12 +1100 Subject: [PATCH 08/17] Finalize TTT-Discover autoresearch runtime and prompt flow --- README.md | 445 +++++++------ configs/ttt_discover_autoresearch.yaml | 32 +- configs/ttt_discover_autoresearch_large.yaml | 32 +- configs/ttt_discover_autoresearch_medium.yaml | 32 +- configs/ttt_discover_autoresearch_small.yaml | 32 +- tests/test_cli_integration.py | 118 ++++ tests/test_env_smoke.py | 37 +- tests/test_prompt_builder.py | 47 ++ tests/test_reward.py | 75 ++- tests/test_runner.py | 22 +- tests/test_runpod.py | 168 +++++ ttt_autoresearch/cli.py | 98 +-- ttt_autoresearch/config.py | 55 +- ttt_autoresearch/env.py | 143 ++-- ttt_autoresearch/prompt_builder.py | 57 ++ ttt_autoresearch/reward.py | 116 +++- ttt_autoresearch/runner.py | 120 +++- ttt_autoresearch/runpod.py | 616 ++++++++++++++++++ 18 files changed, 1866 insertions(+), 379 deletions(-) create mode 100644 tests/test_prompt_builder.py create mode 100644 tests/test_runpod.py create mode 100644 
ttt_autoresearch/prompt_builder.py create mode 100644 ttt_autoresearch/runpod.py diff --git a/README.md b/README.md index 40f38446..844d6ed6 100644 --- a/README.md +++ b/README.md @@ -4,13 +4,15 @@ This repo is a focused fork of [karpathy/autoresearch](https://github.com/karpathy/autoresearch) that replaces the outer experiment loop with [TTT-Discover](https://github.com/test-time-training/discover). -The setup is: +The checked-in default is now a practical unattended setup: -- The **inner loop** is still AutoResearch: edit [`train.py`](train.py), run a fixed-budget training job, and measure `val_bpb`. -- The **outer loop** is TTT-Discover: the model proposes full replacements for `train.py`, sees the resulting metric, and is reinforced online from that reward. -- The reward is strictly the inner-loop outcome: `current_best_val_bpb - candidate_val_bpb`. +- **Outer loop:** Tinker + `openai/gpt-oss-120b` +- **Renderer:** `gpt_oss_high_reasoning` +- **Inner loop:** RunPod `H100 PCIe` spot workers +- **Main preset:** `2 groups x 8 rollouts x 12 steps` +- **Spot failover:** if a worker pod is preempted, the current rollout is retried on a replacement pod automatically -This fork keeps the original AutoResearch target and uses TTT-Discover as the policy improvement layer. +The core objective stays the same as the original AutoResearch repo: improve [`train.py`](train.py) to lower `val_bpb`. ## Credits @@ -20,269 +22,260 @@ This project builds on: - [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175) - [test-time-training/discover](https://github.com/test-time-training/discover) -The RL recipe stays with upstream `discover`. This repo provides the AutoResearch-specific environment, reward, runner, configs, and practical launch workflow. +The RL recipe stays with upstream `discover`. This repo provides the AutoResearch-specific environment, reward, runner, RunPod execution backend, and practical launch workflow. 
-## What This Repo Optimizes +## How The System Works -The repo has two layers: +There are two loops: -1. **Inner optimization target** - - [`prepare.py`](prepare.py) downloads data and trains the tokenizer. +1. **Inner loop** - [`train.py`](train.py) is the only file the outer model edits. - - `val_bpb` is the optimization metric. Lower is better. + - Every rollout runs a real fixed-budget AutoResearch training job. + - The score is `val_bpb`, and lower is better. -2. **Outer TTT-Discover loop** - - [`run_ttt_discover.py`](run_ttt_discover.py) launches the run. - - [`ttt_autoresearch/`](ttt_autoresearch/) adapts AutoResearch to the `discover` environment interface. - - Each candidate `train.py` is executed in an isolated workspace. - - Reward is computed from the measured improvement over the current best state. +2. **Outer loop** + - TTT-Discover samples full-file replacements for `train.py`. + - Each candidate is evaluated by the inner loop. + - Reward is a direct transformed task score: `1 / (1e-8 + val_bpb)`. + - Failed or invalid candidates receive `0.0` reward. + - Upstream `discover` updates the outer model online. -## Repository Layout +The checked-in workflow keeps the outer controller on a stable machine and uses RunPod spot instances only for the inner evaluations. That is what lets the run continue unattended if a spot worker disappears. -```text -prepare.py Fixed data prep and runtime utilities -train.py Inner training program edited by the outer model -program.md Human-authored research instructions/context -run_ttt_discover.py Main TTT-Discover entrypoint -ttt_autoresearch/ Adapter layer for environment, reward, runner, config -configs/ Practical preset YAML configs -tests/ Smoke and unit coverage for the adapter -``` - -## How The RL Loop Works +## What “Unattended” Means Here -At each outer-loop step: +This repo is designed so that: -1. TTT-Discover samples grouped candidate replacements for `train.py`. -2. 
Each candidate is evaluated by running a real AutoResearch training job. -3. The run logs are parsed for `val_bpb`. -4. Reward is computed from improvement over the current best state. -5. Upstream `discover` performs the online RL update. -6. If a candidate improves `val_bpb`, it becomes the new best `train.py`. +- the controller process running [`run_ttt_discover.py`](run_ttt_discover.py) stays alive on a stable machine +- inner evaluations are dispatched to RunPod spot pods +- if a pod is preempted during a rollout, the runner provisions a replacement pod +- the interrupted rollout is retried from scratch on the replacement pod +- the run continues until the configured `groups_per_step x samples_per_step x max_steps` budget is completed -Important details: +Important boundary: -- The **action** is the full replacement contents of `train.py`. -- The **reward** is the inner-loop metric outcome, not a heuristic about the patch text. -- `groups_per_step` controls how many rollout groups are sampled at each RL step. -- `samples_per_step` controls how many rollouts are sampled inside each group. -- `max_concurrent_evaluations` controls how many expensive inner `train.py` jobs may run at once. +- the controller process itself is **not** spot-resilient +- only the **inner worker pool** is spot-resilient -## Quick Start +So the outer process should run on your laptop, workstation, or another non-preemptible box. The H100 spot instances are only for the expensive inner `train.py` jobs. 
-**Requirements** +## Current Default -- Linux -- NVIDIA GPUs -- Python 3.11+ -- [uv](https://docs.astral.sh/uv/) +The default config at [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml) is the recommended medium run: -Install and prepare the base AutoResearch environment: - -```bash -uv sync -uv run prepare.py -uv run train.py -``` +- `model_name: openai/gpt-oss-120b` +- `renderer_name: gpt_oss_high_reasoning` +- `target_val_bpb: 0.85` +- `execution_backend: runpod` +- `groups_per_step: 2` +- `samples_per_step: 8` +- `max_steps: 12` +- `max_concurrent_evaluations: 16` +- `runpod_gpu_type_ids: ["NVIDIA H100 PCIe"]` +- `runpod_interruptible: true` -Then launch the default practical TTT-Discover mode: +That means: -```bash -uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch.yaml -``` +- `16` rollouts per outer step +- `12` outer RL updates +- `192` rollout evaluations total +- `1` extra baseline run before RL starts +- `193` total inner jobs -## Training Presets +## Presets -This repo now ships with three practical presets instead of a paper-scale default. +The repo ships with three practical presets: ### Small File: [`configs/ttt_discover_autoresearch_small.yaml`](configs/ttt_discover_autoresearch_small.yaml) -- `groups_per_step: 2` -- `samples_per_step: 4` -- `max_steps: 12` -- total evaluations: `96` - -Use this when: - -- you want a quick sanity run -- you are testing a new model backend -- you are on a single GPU and want something that finishes in a reasonable time +- `2 x 4 x 12` +- `96` RL rollouts +- `8` concurrent RunPod workers ### Medium File: [`configs/ttt_discover_autoresearch_medium.yaml`](configs/ttt_discover_autoresearch_medium.yaml) -- `groups_per_step: 2` -- `samples_per_step: 8` -- `max_steps: 12` -- total evaluations: `192` - -This is the **recommended main mode** for the repo. 
+- `2 x 8 x 12` +- `192` RL rollouts +- `16` concurrent RunPod workers -It is also the checked-in default at [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml). +This is the recommended main mode and matches the default config. ### Large File: [`configs/ttt_discover_autoresearch_large.yaml`](configs/ttt_discover_autoresearch_large.yaml) -- `groups_per_step: 2` -- `samples_per_step: 8` -- `max_steps: 20` -- total evaluations: `320` +- `2 x 8 x 20` +- `320` RL rollouts +- `16` concurrent RunPod workers -Use this when the medium run is already stable and you want more policy updates without moving into paper-scale compute. +Use this only after the medium run is stable. -## Recommended Modes +## RunPod Backend -For this fork, the most realistic settings are: +The inner-loop executor now supports two backends: -- **Small:** `2 x 4 x 12` -- **Medium:** `2 x 8 x 12` -- **Large:** `2 x 8 x 20` +- `local` +- `runpod` -These are intentionally sized around the practical AutoResearch regime, where each rollout is a real GPU training job. They keep grouped rollouts and online RL updates from TTT-Discover, but avoid the extreme compute profile of the original paper. +The `runpod` backend does the following: -## Hardware Recommendation +1. Creates up to `max_concurrent_evaluations` spot pods. +2. Waits for SSH on each pod. +3. Bootstraps the pod by: + - uploading the repo snapshot + - installing `uv` + - running `uv sync` + - running `uv run prepare.py --num-shards 10` +4. Uploads each candidate workspace to a worker pod. +5. Runs the inner command remotely. +6. Pulls back `stdout.log`, `stderr.log`, and metrics. +7. Deletes the pods automatically when the run finishes. -If your goal is to push `val_bpb` seriously, the inner loop should run on **H100 80GB** class GPUs. +If a pod disappears during upload, bootstrap, or execution, the worker is retired, a replacement is created, and the interrupted rollout is retried. 
-Why: +## Prerequisites -- [`train.py`](train.py) uses Hopper-specific FA3 kernels when available. -- [`program.md`](program.md) shows representative peak VRAM around `45 GB`. -- `A100 40GB` is therefore not viable for the intended setup. +You need: -Recommended inner-loop target: +- Linux or macOS for the controller machine +- Python 3.11+ +- [uv](https://docs.astral.sh/uv/) +- a Tinker-enabled account for the outer loop +- a RunPod account with: + - API access + - an SSH public key registered in the account + - access to H100 spot instances -- **Best cost/performance:** H100 PCIe 80GB -- **Best absolute performance:** H100 SXM 80GB +Environment: -For these practical presets, I recommend: +```bash +export RUNPOD_API_KEY=... +``` -- **Small (`2x4x12`)**: rent `8x H100 80GB` -- **Medium (`2x8x12`)**: rent `16x H100 80GB` -- **Large (`2x8x20`)**: rent `16x H100 80GB` +You also need whatever Tinker credentials your local `ttt-discover` installation expects. -That gives one GPU per rollout in a step wave. If you rent fewer GPUs, the run still works, but each step is split into multiple waves and takes longer. +## Quick Start -To run with rented GPUs, set: +Launch the default unattended medium run: -```yaml -max_concurrent_evaluations: 16 -gpu_devices: ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15"] +```bash +uv sync +uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch.yaml ``` -The runner pins each candidate subprocess to one configured `CUDA_VISIBLE_DEVICES` slot. - -## Cost Model +Or explicitly choose the medium preset: -There are two cost buckets: +```bash +uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch_medium.yaml +``` -1. **Inner-loop GPU rental** - - pays for the real `train.py` runs - - dominates total cost in this repo +## Cost And Runtime Shape -2. 
**Outer-loop Tinker cost** - - pays for prompt prefill, sampling, and RL training tokens - - is much smaller than the inner-loop GPU cost here +For this repo, the expensive part is the inner loop. Each rollout is a real five-minute AutoResearch training job. -### Cost Assumptions +The repo’s own reference timing in [`program.md`](program.md) shows: -The estimates below use: +- `total_seconds: 325.9` per rollout -- `Qwen/Qwen3.5-35B-A3B` on Tinker -- H100 PCIe 80GB at about `$2.86 / GPU / hour` -- about `325.9s` per inner rollout -- about `$0.020` Tinker cost per rollout as a practical midpoint for this repo +That means the default medium run has: -### Preset Cost Estimates +- `192` RL rollouts +- `1` baseline run +- `193` total inner runs +- about `17.47` total GPU-hours -| Mode | Shape | Total evals | GPU rental | Tinker | Total | -|---|---:|---:|---:|---:|---:| -| Small | `2x4x12` | 96 | about `$25` | about `$1.9` | about `$27` | -| Medium | `2x8x12` | 192 | about `$50` | about `$3.8` | about `$54` | -| Large | `2x8x20` | 320 | about `$83` | about `$6.4` | about `$89` | +### Example Medium Budget -### Cost Distribution +Using your current spot numbers: -For these realistic runs, the cost split is still roughly: +- `H100 PCIe spot: $1.25/hr` +- `H100 SXM spot: $1.75/hr` -- **~92% GPU rental** -- **~8% Tinker** +The medium run works out to: -That is the core difference between this repo and cheaper code-generation tasks: each rollout is a real training job. +- `H100 PCIe`: about `$21.84` +- `H100 SXM`: about `$30.57` -## How I Recommend Running It +Tinker is the smaller cost bucket here. The exact amount depends on current Tinker pricing and token usage, but for this repo it is materially smaller than the H100 rental line item. -### Single GPU +### Wall Clock -Use the small preset, and keep evaluation serialized: +Total GPU-hours are roughly fixed, so more pods mostly reduce elapsed time, not total spend. 
-```yaml -groups_per_step: 2 -samples_per_step: 4 -max_steps: 12 -max_concurrent_evaluations: 1 -gpu_devices: null -``` +Approximate medium-run wall clock: -This is the safest way to stay close to the original one-GPU AutoResearch style while still using the TTT-Discover framework. +- `1 H100`: about `18-20h` +- `8 H100s`: about `2.5-3h` +- `16 H100s`: about `1.3-1.8h` -### Practical Rented Run +## Model And Renderer -Use the medium preset: +The checked-in default is: -```bash -uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch_medium.yaml +```yaml +model_name: openai/gpt-oss-120b +renderer_name: gpt_oss_high_reasoning ``` -Recommended provisioning: +This is intentional: -- `16x H100 PCIe 80GB` -- `max_concurrent_evaluations: 16` -- `gpu_devices` set to the visible devices on the host +- it matches the strongest paper-aligned model family more closely than the older Qwen default +- it is already supported by the renderer mapping in [`ttt_autoresearch/config.py`](ttt_autoresearch/config.py) +- it is the intended outer-loop model for the default RunPod workflow -This is the main mode I recommend if your goal is to beat the baseline without exploding compute. +Other models still work, but if the model family is not recognized automatically you must set `renderer_name` explicitly. 
+Other models still work, but if the model family is not recognized automatically, you must set `renderer_name` explicitly.
-```yaml -model_name: Qwen/Qwen3.5-35B-A3B -renderer_name: qwen3 -``` +## Repository Layout -```yaml -model_name: openai/gpt-oss-120b -renderer_name: gpt_oss_high_reasoning +```text +prepare.py Fixed data prep and runtime utilities +train.py Inner training program edited by the outer model +program.md Human-authored research instructions/context +run_ttt_discover.py Main TTT-Discover entrypoint +ttt_autoresearch/ Adapter layer for environment, reward, runner, RunPod, config +configs/ Practical preset YAML configs +tests/ Smoke and unit coverage for the adapter ``` -If you use an unknown model family, set `renderer_name` explicitly. The config fails fast if it cannot infer a compatible renderer. - ## Output Artifacts Each run writes artifacts under `runs//`: @@ -294,36 +287,104 @@ Each run writes artifacts under `runs//`: - `best/metrics.json` - `candidates/` - `discover_log/` +- `runpod_pool.json` -## Plain AutoResearch Mode Still Works +`runpod_pool.json` records the worker pod metadata for the current run so you can inspect what was provisioned. -This fork does not remove the original AutoResearch workflow. 
You can still use it directly: +The important resume/checkpoint files are: + +- `baseline.json` + - cached baseline result; if it already exists, the CLI reuses it instead of rerunning baseline +- `baseline/train.py` + - stored baseline script snapshot for reproducible resume +- `best/train.py` + - best discovered script so far +- `best/metrics.json` + - best discovered `val_bpb` plus artifact paths +- `history.jsonl` + - append-only candidate evaluation log +- `candidates/_/train.py` + - exact candidate script evaluated for that rollout +- `candidates/_/stdout.log` + - raw stdout from the inner AutoResearch run +- `candidates/_/stderr.log` + - raw stderr from the inner AutoResearch run +- `candidates/_/metrics.json` + - parsed metrics sidecar for that rollout +- `candidates/_/rollout_manifest.json` + - self-contained rollout record with the starting state, candidate payload, evaluation result, reward, and promotion outcome +- invalid or malformed model outputs are also persisted under `candidates/` with a `rollout_manifest.json`, `metrics.json`, and raw `response.txt` +- `discover_log/checkpoints.jsonl` + - upstream TTT-Discover checkpoint index +- `discover_log/` + - LoRA/training state and sampler checkpoints used for resume + +## Resuming A Stopped Run + +To continue a stopped run, reuse the same `run_dir`. 
+If you stopped at `12` steps and want to continue further, increase `max_steps` above the completed count before rerunning.
## Current Readiness -What is tested locally: +What is covered in tests: -- config loading and override behavior +- config loading and normalization - reward mapping - candidate parsing -- environment prompt and state flow - CLI wiring into upstream `discover` -- concurrency gating for inner evaluations +- local concurrency gating +- RunPod retry logic for interrupted workers +- runner cleanup behavior -What is still environment-dependent: +What is still operationally environment-dependent: -- a true end-to-end production run on the target Linux/CUDA machine -- provider-specific model serving details -- long-run throughput and stability on rented multi-GPU hardware +- real RunPod API credentials +- SSH access from the controller to the worker pods +- real Tinker credentials and provider setup +- long-run stability on your specific account and spot market -So the repo is structurally ready for the intended setup, but final operational confidence still comes from a real GPU run on target hardware. +So the repo is structurally ready for unattended Tinker + RunPod operation, but the final production proof is still a real run on your account. 
## License diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index 7f52e381..bae46b7e 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,6 +1,7 @@ -model_name: Qwen/Qwen3.5-35B-A3B +model_name: openai/gpt-oss-120b provider: null api_base: null +target_val_bpb: 0.85 max_steps: 12 groups_per_step: 2 samples_per_step: 8 @@ -11,7 +12,7 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover -renderer_name: null +renderer_name: gpt_oss_high_reasoning learning_rate: 0.00004 lora_rank: 32 kl_penalty_coef: 0.1 @@ -22,5 +23,30 @@ num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 1 +max_concurrent_evaluations: 16 gpu_devices: null +execution_backend: runpod +runpod_api_key_env: RUNPOD_API_KEY +runpod_api_base: https://rest.runpod.io/v1 +runpod_cloud_type: COMMUNITY +runpod_interruptible: true +runpod_gpu_type_ids: + - NVIDIA H100 PCIe +runpod_template_id: null +runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 +runpod_name_prefix: autoresearch-ttt +runpod_support_public_ip: true +runpod_ports: + - 22/tcp +runpod_container_disk_gb: 50 +runpod_volume_gb: 0 +runpod_volume_mount_path: /workspace +runpod_ssh_user: root +runpod_ssh_private_key_path: null +runpod_repo_root: /workspace/autoresearch +runpod_prepare_num_shards: 10 +runpod_bootstrap_timeout_sec: 7200 +runpod_retry_limit: 3 +runpod_poll_interval_sec: 5 +runpod_bootstrap_commands: null +runpod_terminate_on_close: true diff --git a/configs/ttt_discover_autoresearch_large.yaml b/configs/ttt_discover_autoresearch_large.yaml index 03cd4069..a63d4310 100644 --- a/configs/ttt_discover_autoresearch_large.yaml +++ b/configs/ttt_discover_autoresearch_large.yaml @@ -1,6 +1,7 @@ -model_name: Qwen/Qwen3.5-35B-A3B +model_name: openai/gpt-oss-120b provider: null api_base: null 
+target_val_bpb: 0.85 max_steps: 20 groups_per_step: 2 samples_per_step: 8 @@ -11,7 +12,7 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover-large -renderer_name: null +renderer_name: gpt_oss_high_reasoning learning_rate: 0.00004 lora_rank: 32 kl_penalty_coef: 0.1 @@ -22,5 +23,30 @@ num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 1 +max_concurrent_evaluations: 16 gpu_devices: null +execution_backend: runpod +runpod_api_key_env: RUNPOD_API_KEY +runpod_api_base: https://rest.runpod.io/v1 +runpod_cloud_type: COMMUNITY +runpod_interruptible: true +runpod_gpu_type_ids: + - NVIDIA H100 PCIe +runpod_template_id: null +runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 +runpod_name_prefix: autoresearch-ttt +runpod_support_public_ip: true +runpod_ports: + - 22/tcp +runpod_container_disk_gb: 50 +runpod_volume_gb: 0 +runpod_volume_mount_path: /workspace +runpod_ssh_user: root +runpod_ssh_private_key_path: null +runpod_repo_root: /workspace/autoresearch +runpod_prepare_num_shards: 10 +runpod_bootstrap_timeout_sec: 7200 +runpod_retry_limit: 3 +runpod_poll_interval_sec: 5 +runpod_bootstrap_commands: null +runpod_terminate_on_close: true diff --git a/configs/ttt_discover_autoresearch_medium.yaml b/configs/ttt_discover_autoresearch_medium.yaml index 9076247e..d46b6a9f 100644 --- a/configs/ttt_discover_autoresearch_medium.yaml +++ b/configs/ttt_discover_autoresearch_medium.yaml @@ -1,6 +1,7 @@ -model_name: Qwen/Qwen3.5-35B-A3B +model_name: openai/gpt-oss-120b provider: null api_base: null +target_val_bpb: 0.85 max_steps: 12 groups_per_step: 2 samples_per_step: 8 @@ -11,7 +12,7 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover-medium -renderer_name: null +renderer_name: gpt_oss_high_reasoning learning_rate: 0.00004 lora_rank: 32 
kl_penalty_coef: 0.1 @@ -22,5 +23,30 @@ num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 1 +max_concurrent_evaluations: 16 gpu_devices: null +execution_backend: runpod +runpod_api_key_env: RUNPOD_API_KEY +runpod_api_base: https://rest.runpod.io/v1 +runpod_cloud_type: COMMUNITY +runpod_interruptible: true +runpod_gpu_type_ids: + - NVIDIA H100 PCIe +runpod_template_id: null +runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 +runpod_name_prefix: autoresearch-ttt +runpod_support_public_ip: true +runpod_ports: + - 22/tcp +runpod_container_disk_gb: 50 +runpod_volume_gb: 0 +runpod_volume_mount_path: /workspace +runpod_ssh_user: root +runpod_ssh_private_key_path: null +runpod_repo_root: /workspace/autoresearch +runpod_prepare_num_shards: 10 +runpod_bootstrap_timeout_sec: 7200 +runpod_retry_limit: 3 +runpod_poll_interval_sec: 5 +runpod_bootstrap_commands: null +runpod_terminate_on_close: true diff --git a/configs/ttt_discover_autoresearch_small.yaml b/configs/ttt_discover_autoresearch_small.yaml index 641b22ee..e6d58df1 100644 --- a/configs/ttt_discover_autoresearch_small.yaml +++ b/configs/ttt_discover_autoresearch_small.yaml @@ -1,6 +1,7 @@ -model_name: Qwen/Qwen3.5-35B-A3B +model_name: openai/gpt-oss-120b provider: null api_base: null +target_val_bpb: 0.85 max_steps: 12 groups_per_step: 2 samples_per_step: 4 @@ -11,7 +12,7 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover-small -renderer_name: null +renderer_name: gpt_oss_high_reasoning learning_rate: 0.00004 lora_rank: 32 kl_penalty_coef: 0.1 @@ -22,5 +23,30 @@ num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 1 +max_concurrent_evaluations: 8 gpu_devices: null +execution_backend: runpod +runpod_api_key_env: RUNPOD_API_KEY +runpod_api_base: https://rest.runpod.io/v1 +runpod_cloud_type: COMMUNITY 
+runpod_interruptible: true +runpod_gpu_type_ids: + - NVIDIA H100 PCIe +runpod_template_id: null +runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 +runpod_name_prefix: autoresearch-ttt +runpod_support_public_ip: true +runpod_ports: + - 22/tcp +runpod_container_disk_gb: 50 +runpod_volume_gb: 0 +runpod_volume_mount_path: /workspace +runpod_ssh_user: root +runpod_ssh_private_key_path: null +runpod_repo_root: /workspace/autoresearch +runpod_prepare_num_shards: 10 +runpod_bootstrap_timeout_sec: 7200 +runpod_retry_limit: 3 +runpod_poll_interval_sec: 5 +runpod_bootstrap_commands: null +runpod_terminate_on_close: true diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py index 28714d17..07fa49f4 100644 --- a/tests/test_cli_integration.py +++ b/tests/test_cli_integration.py @@ -84,10 +84,12 @@ async def fake_discover_main(cfg): "\n".join( [ "model_name: Qwen/Qwen3.5-35B-A3B", + "execution_backend: local", f"run_dir: {run_dir}", "max_steps: 3", "groups_per_step: 3", "samples_per_step: 2", + "max_concurrent_evaluations: 1", "baseline_command_override:", f" - {sys.executable}", " - -c", @@ -111,6 +113,10 @@ async def fake_discover_main(cfg): self.assertTrue(captured.get("dataset_builder_called")) self.assertEqual(captured["rl_config"]["model_name"], "Qwen/Qwen3.5-35B-A3B") self.assertEqual(captured["rl_config"]["num_epochs"], 3) + self.assertEqual(captured["rl_config"]["adv_estimator"], "entropic_adaptive_beta") + self.assertEqual(captured["rl_config"]["loss_fn"], "importance_sampling") + self.assertEqual(captured["rl_config"]["num_substeps"], 1) + self.assertTrue(captured["rl_config"]["remove_constant_reward_groups"]) self.assertEqual(captured["dataset_config"]["batch_size"], 3) self.assertEqual(captured["dataset_config"]["group_size"], 2) self.assertEqual(captured["dataset_config"]["problem_type"], "autoresearch") @@ -121,6 +127,118 @@ async def fake_discover_main(cfg): else: sys.modules[name] = previous + def 
test_cli_reuses_existing_baseline_when_resuming(self) -> None: + captured: dict[str, object] = {} + + fake_root = types.ModuleType("ttt_discover") + fake_rl = types.ModuleType("ttt_discover.rl") + fake_rl_train = types.ModuleType("ttt_discover.rl.train") + fake_utils = types.ModuleType("ttt_discover.tinker_utils") + fake_dataset_builder = types.ModuleType("ttt_discover.tinker_utils.dataset_builder") + + class FakeRLConfig: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + class FakeDatasetConfig: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + captured["dataset_config"] = kwargs + + def fake_get_single_problem_dataset_builder(config): + async def builder(): + captured["dataset_builder_called"] = True + return {"dataset_config": config} + + return builder + + async def fake_discover_main(cfg): + captured["rl_config"] = cfg.__dict__.copy() + await cfg.dataset_builder() + + fake_rl_train.Config = FakeRLConfig + fake_rl_train.main = fake_discover_main + fake_dataset_builder.DatasetConfig = FakeDatasetConfig + fake_dataset_builder.get_single_problem_dataset_builder = fake_get_single_problem_dataset_builder + + previous_modules = {name: sys.modules.get(name) for name in ( + "ttt_discover", + "ttt_discover.rl", + "ttt_discover.rl.train", + "ttt_discover.tinker_utils", + "ttt_discover.tinker_utils.dataset_builder", + )} + sys.modules["ttt_discover"] = fake_root + sys.modules["ttt_discover.rl"] = fake_rl + sys.modules["ttt_discover.rl.train"] = fake_rl_train + sys.modules["ttt_discover.tinker_utils"] = fake_utils + sys.modules["ttt_discover.tinker_utils.dataset_builder"] = fake_dataset_builder + + try: + with tempfile.TemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) + run_dir = tmp_path / "runs" / "resume-test" + (run_dir / "baseline" / "workspace").mkdir(parents=True) + (run_dir / "baseline" / "workspace" / "train.py").write_text("# stored baseline\n", encoding="utf-8") + (run_dir / "baseline" / "train.py").write_text("# stored 
baseline\n", encoding="utf-8") + (run_dir / "best").mkdir(parents=True) + (run_dir / "baseline.json").write_text( + "\n".join( + [ + "{", + ' "status": "success",', + ' "val_bpb": 1.0,', + f' "stdout_path": "{run_dir / "baseline" / "workspace" / "stdout.log"}",', + f' "stderr_path": "{run_dir / "baseline" / "workspace" / "stderr.log"}",', + ' "elapsed_sec": 1.0,', + f' "workspace_path": "{run_dir / "baseline" / "workspace"}",', + ' "metrics_path": null,', + ' "command": ["python", "train.py"],', + ' "returncode": 0', + "}", + ] + ) + + "\n", + encoding="utf-8", + ) + + config_path = tmp_path / "config.yaml" + config_path.write_text( + "\n".join( + [ + "model_name: Qwen/Qwen3.5-35B-A3B", + "execution_backend: local", + f"run_dir: {run_dir}", + "max_steps: 3", + "groups_per_step: 2", + "samples_per_step: 2", + "max_concurrent_evaluations: 1", + "baseline_command_override:", + f" - {sys.executable}", + " - -c", + ' - "import sys; sys.exit(7)"', + "candidate_command_override:", + f" - {sys.executable}", + " - -c", + ' - "print(\'val_bpb: 0.900000\')"', + "wandb_project: null", + ] + ) + + "\n", + encoding="utf-8", + ) + + exit_code = cli.main(["--config", str(config_path)]) + self.assertEqual(exit_code, 0) + self.assertTrue(captured.get("dataset_builder_called")) + self.assertEqual(captured["dataset_config"]["batch_size"], 2) + finally: + for name, previous in previous_modules.items(): + if previous is None: + sys.modules.pop(name, None) + else: + sys.modules[name] = previous + if __name__ == "__main__": unittest.main() diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 0528b141..1cd7252a 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -7,12 +7,28 @@ import sys from ttt_autoresearch.config import TTTAutoResearchConfig -from ttt_autoresearch.env import AutoResearchDiscoverEnv +from ttt_autoresearch.env import AutoResearchDiscoverEnv, AutoResearchState from ttt_autoresearch.reward import AutoResearchRewardEvaluator from 
ttt_autoresearch.runner import AutoResearchRunner class EnvSmokeTests(unittest.TestCase): + def test_state_prompt_shows_before_after_without_construction(self) -> None: + state = AutoResearchState( + timestep=1, + construction=[], + code="print('candidate')\n", + value=-0.9, + parent_values=[-1.1], + observation="val_bpb: 0.900000\n", + baseline_val_bpb=1.1, + current_best_val_bpb=0.9, + raw_score=0.9, + ) + prompt = state.to_prompt(0.85, metric_name="val_bpb", maximize=False, language="python") + self.assertIn("Here is the val_bpb before and after running the code above", prompt) + self.assertIn("1.100000 -> 0.900000", prompt) + def test_env_prompt_and_reward_flow(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) @@ -25,7 +41,10 @@ def test_env_prompt_and_reward_flow(self) -> None: (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, timeout_sec=1, + target_val_bpb=0.95, candidate_command_override=[sys.executable, "tests/fixtures/fake_train.py"], ).normalized(root) runner = AutoResearchRunner(root, config, Path(config.run_dir)) @@ -42,7 +61,21 @@ def test_env_prompt_and_reward_flow(self) -> None: })()) prompt = env.get_question() - self.assertIn("Current best val_bpb: 1.100000", prompt) + self.assertIn("You are iteratively optimizing val_bpb.", prompt) + self.assertIn("Current val_bpb (lower is better): 1.100000", prompt) + self.assertIn("Target: 0.95", prompt) + self.assertIn("Here is the last code we ran", prompt) + self.assertIn("## Problem", prompt) + self.assertIn("## Budget & Resources", prompt) + self.assertIn("## Rules", prompt) + self.assertIn("You may want to start your search from the current training script shown above.", prompt) + self.assertIn("This is the current starting point selected by the search procedure.", prompt) + self.assertIn("Reason about how you could further improve 
this training script under the fixed 5-minute training budget.", prompt) + self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) + self.assertNotIn("Baseline val_bpb from the original script", prompt) + self.assertNotIn("LOOP FOREVER", prompt) + self.assertNotIn("results.tsv", prompt) + self.assertNotIn("git reset", prompt) self.assertTrue(env.check_format('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}')) verify = asyncio.run(env.check_answer('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}', 0)) diff --git a/tests/test_prompt_builder.py b/tests/test_prompt_builder.py new file mode 100644 index 00000000..3ec5b64c --- /dev/null +++ b/tests/test_prompt_builder.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import unittest + +from ttt_autoresearch.prompt_builder import build_rollout_prompt + + +class PromptBuilderTests(unittest.TestCase): + def test_prompt_is_single_rollout_specific(self) -> None: + prompt = build_rollout_prompt( + state_ctx="You are iteratively optimizing val_bpb.\nCurrent val_bpb (lower is better): 1.020000", + construction_section=( + "You may want to start your search from the current training script shown above.\n" + "This is the current starting point selected by the search procedure.\n" + "You are encouraged to explore meaningfully different directions if the current approach appears saturated." + ), + code_section=( + "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" + "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" + "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" + "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." 
+ ), + ) + self.assertIn("expert machine learning researcher", prompt) + self.assertIn("## Problem", prompt) + self.assertIn("## Budget & Resources", prompt) + self.assertIn("## AutoResearch Invariants", prompt) + self.assertIn("## Rules", prompt) + self.assertIn("You are iteratively optimizing val_bpb.", prompt) + self.assertIn("You may want to start your search from the current training script shown above.", prompt) + self.assertIn("This is the current starting point selected by the search procedure.", prompt) + self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) + self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) + self.assertIn("Maximum sequence length is `2048`", prompt) + self.assertIn("Validation uses the pinned shard `06542`", prompt) + self.assertIn("vocab size `8192`", prompt) + self.assertIn("forward(x, y, reduction='none')", prompt) + self.assertIn("Return exactly one ```json``` block", prompt) + self.assertNotIn("Baseline val_bpb from the original script", prompt) + self.assertNotIn("LOOP FOREVER", prompt) + self.assertNotIn("results.tsv", prompt) + self.assertNotIn("git reset", prompt) + self.assertNotIn("NEVER STOP", prompt) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_reward.py b/tests/test_reward.py index bcdb4a3d..1a817425 100644 --- a/tests/test_reward.py +++ b/tests/test_reward.py @@ -1,6 +1,7 @@ from __future__ import annotations from pathlib import Path +import json import tempfile import threading import time @@ -26,8 +27,23 @@ def test_reward_mapping(self) -> None: command=["python", "train.py"], returncode=0, ) - reward, correctness = reward_for_result(1.0, result) - self.assertAlmostEqual(reward, 0.1) + reward, correctness = reward_for_result(result) + self.assertAlmostEqual(reward, 1.0 / 0.9) + self.assertEqual(correctness, 1.0) + + regression_result = RunResult( + status="success", 
+ val_bpb=1.1, + stdout_path=Path("stdout.log"), + stderr_path=Path("stderr.log"), + elapsed_sec=1.0, + workspace_path=Path("."), + metrics_path=None, + command=["python", "train.py"], + returncode=0, + ) + reward, correctness = reward_for_result(regression_result) + self.assertAlmostEqual(reward, 1.0 / 1.1) self.assertEqual(correctness, 1.0) timeout_result = RunResult( @@ -41,8 +57,8 @@ def test_reward_mapping(self) -> None: command=["python", "train.py"], returncode=None, ) - reward, correctness = reward_for_result(1.0, timeout_result) - self.assertEqual(reward, -0.5) + reward, correctness = reward_for_result(timeout_result) + self.assertEqual(reward, 0.0) self.assertEqual(correctness, 0.0) def test_evaluator_uses_inner_metric_as_reward(self) -> None: @@ -57,6 +73,8 @@ def test_evaluator_uses_inner_metric_as_reward(self) -> None: (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, timeout_sec=1, baseline_command_override=[sys.executable, "tests/fixtures/fake_train.py"], candidate_command_override=[sys.executable, "tests/fixtures/fake_train.py"], @@ -77,6 +95,48 @@ def test_evaluator_uses_inner_metric_as_reward(self) -> None: result = evaluator.get_reward(payload, state) self.assertGreater(result["reward"], 0.0) self.assertEqual(result["correctness"], 1.0) + manifest = json.loads((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json").__next__().read_text(encoding="utf-8")) + self.assertEqual(manifest["starting_state"]["timestep"], -1) + self.assertEqual(manifest["candidate"]["summary"], "improve") + self.assertEqual(manifest["evaluation"]["status"], "success") + + def test_invalid_candidate_is_persisted_to_history_and_manifest(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / 
"prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("# val_bpb: 1.000000\n", encoding="utf-8") + + config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, + timeout_sec=1, + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.0) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + evaluator = AutoResearchRewardEvaluator(problem_type="autoresearch", log_dir=str(bootstrap.run_dir)) + state = AutoResearchState( + timestep=2, + construction=[], + code=(root / "train.py").read_text(encoding="utf-8"), + value=-1.0, + baseline_val_bpb=1.0, + current_best_val_bpb=1.0, + ) + + result = evaluator.get_reward("not-json", state) + + self.assertEqual(result["metrics"]["candidate_status"], "invalid_candidate") + history_path = Path(config.run_dir) / "history.jsonl" + history_entries = history_path.read_text(encoding="utf-8").strip().splitlines() + self.assertEqual(len(history_entries), 1) + history = json.loads(history_entries[0]) + self.assertEqual(history["status"], "invalid_candidate") + manifest_path = next((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json")) + manifest = json.loads(manifest_path.read_text(encoding="utf-8")) + self.assertEqual(manifest["evaluation"]["status"], "invalid_candidate") + self.assertEqual(manifest["raw_response"], "not-json") def test_concurrent_reward_calls_serialize_inner_evaluations(self) -> None: class FakeRunner: @@ -112,13 +172,16 @@ def update_best(self, **_: object) -> bool: def append_history(self, _: dict[str, object]) -> None: return None + def write_rollout_manifest(self, workspace: Path, payload: dict[str, object]) -> Path: + return workspace / "rollout_manifest.json" + def read_text(self, _: Path, max_chars: int = 4000) -> str: return "" bootstrap = BootstrapContext( repo_root=Path("."), run_dir=Path("."), - 
config=TTTAutoResearchConfig(max_concurrent_evaluations=1).normalized(Path(".")), + config=TTTAutoResearchConfig(execution_backend="local", max_concurrent_evaluations=1).normalized(Path(".")), program_text="program", baseline_train_py="train", baseline_val_bpb=1.0, @@ -153,7 +216,7 @@ def test_parallel_evaluations_require_explicit_gpu_devices(self) -> None: bootstrap = BootstrapContext( repo_root=Path("."), run_dir=Path("."), - config=TTTAutoResearchConfig(max_concurrent_evaluations=2).normalized(Path(".")), + config=TTTAutoResearchConfig(execution_backend="local", max_concurrent_evaluations=2).normalized(Path(".")), program_text="program", baseline_train_py="train", baseline_val_bpb=1.0, diff --git a/tests/test_runner.py b/tests/test_runner.py index 7ef1bc79..cd6b10cb 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -31,6 +31,7 @@ def test_runner_reads_metric_and_status(self) -> None: (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") config = TTTAutoResearchConfig( + execution_backend="local", timeout_sec=1, baseline_command_override=[sys.executable, "tests/fixtures/fake_train.py"], ).normalized(root) @@ -39,6 +40,21 @@ def test_runner_reads_metric_and_status(self) -> None: result = runner.run_baseline(bootstrap=bootstrap) self.assertEqual(result.status, "success") self.assertAlmostEqual(result.val_bpb, 1.25) + self.assertTrue((Path(config.run_dir) / "baseline" / "train.py").exists()) + + def test_build_bootstrap_prefers_stored_baseline_snapshot(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + run_dir = root / "resume-run" + (root / "program.md").write_text("program", encoding="utf-8") + (root / "train.py").write_text("repo version\n", encoding="utf-8") + (run_dir / "baseline").mkdir(parents=True) + (run_dir / "baseline" / "train.py").write_text("stored baseline\n", encoding="utf-8") + + config = TTTAutoResearchConfig(run_dir=str(run_dir)).normalized(root) + 
runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.0) + self.assertEqual(bootstrap.baseline_train_py, "stored baseline\n") def test_config_normalizes_relative_paths_and_overrides_env(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: @@ -94,10 +110,14 @@ def test_unknown_model_requires_explicit_renderer(self) -> None: def test_group_defaults_reflect_medium_preset(self) -> None: config = TTTAutoResearchConfig().normalized(Path(".")) + self.assertEqual(config.model_name, "openai/gpt-oss-120b") + self.assertEqual(config.execution_backend, "runpod") self.assertEqual(config.max_steps, 12) self.assertEqual(config.groups_per_step, 2) self.assertEqual(config.samples_per_step, 8) - self.assertEqual(config.max_concurrent_evaluations, 1) + self.assertEqual(config.max_concurrent_evaluations, 16) + self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") + self.assertEqual(config.runpod_gpu_type_ids, ["NVIDIA H100 PCIe"]) def test_gpu_devices_are_normalized(self) -> None: config = TTTAutoResearchConfig(gpu_devices=[0, 3, 7]).normalized(Path(".")) diff --git a/tests/test_runpod.py b/tests/test_runpod.py new file mode 100644 index 00000000..2f5b280c --- /dev/null +++ b/tests/test_runpod.py @@ -0,0 +1,168 @@ +from __future__ import annotations + +from pathlib import Path +import json +import os +import queue +import tempfile +import threading +import unittest + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.runpod import RemoteExecutionResult, RunPodError, RunPodPod, RunPodPodLostError, RunPodPool +from ttt_autoresearch.runner import AutoResearchRunner + + +class RunPodPoolTests(unittest.TestCase): + def test_execute_workspace_retries_after_spot_interruption(self) -> None: + class FakePool(RunPodPool): + def __init__(self, config: TTTAutoResearchConfig) -> None: + self.config = config + self.repo_root = Path(".") + self.run_dir = Path(".") + self.lock = threading.Lock() + 
self.available: queue.Queue[RunPodPod] = queue.Queue() + self.created_pods: dict[str, RunPodPod] = {} + self.repo_archive_path = Path("repo.tar.gz") + self.repo_archive_lock = threading.Lock() + self.closed = False + self.sequence = 0 + self.calls = 0 + self.releases: list[tuple[str, bool]] = [] + + def _acquire_pod(self) -> RunPodPod: + pod = RunPodPod(id=f"pod-{self.sequence}", name=f"pod-{self.sequence}") + self.sequence += 1 + return pod + + def _release_pod(self, pod: RunPodPod, reusable: bool) -> None: + self.releases.append((pod.id, reusable)) + + def _ensure_pod_ready(self, pod: RunPodPod) -> None: + pod.ready = True + + def _run_workspace_on_pod(self, pod: RunPodPod, workspace: Path, command: list[str], env: dict[str, str], timeout_sec: int, label: str) -> RemoteExecutionResult: + self.calls += 1 + if self.calls == 1: + raise RunPodPodLostError("interrupted") + return RemoteExecutionResult(stdout="val_bpb: 0.900000\n", stderr="", returncode=0, elapsed_sec=1.0) + + config = TTTAutoResearchConfig(execution_backend="runpod", runpod_retry_limit=2).normalized(Path(".")) + pool = FakePool(config) + with tempfile.TemporaryDirectory() as tmpdir: + workspace = Path(tmpdir) + (workspace / "train.py").write_text("print('ok')\n", encoding="utf-8") + result = pool.execute_workspace(workspace=workspace, command=["python", "train.py"], env={}, timeout_sec=10, label="candidate") + self.assertEqual(result.returncode, 0) + self.assertEqual(pool.releases, [("pod-0", False), ("pod-1", True)]) + + def test_runner_close_shuts_down_pool(self) -> None: + class FakePool: + def __init__(self) -> None: + self.closed = False + + def close(self) -> None: + self.closed = True + + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + config = TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + pool = FakePool() + runner._runpod_pool = pool # type: ignore[assignment] + runner.close() + 
self.assertTrue(pool.closed) + + + def test_missing_exit_code_defaults_to_crash(self) -> None: + """If .exit_code is empty (disk full, process killed), returncode should be 1 not 0.""" + class FakePool(RunPodPool): + def __init__(self, config: TTTAutoResearchConfig) -> None: + self.config = config + self.repo_root = Path(".") + self.run_dir = Path(".") + self.lock = threading.Lock() + self.available: queue.Queue[RunPodPod] = queue.Queue() + self.created_pods: dict[str, RunPodPod] = {} + self.repo_archive_path = Path("repo.tar.gz") + self.repo_archive_lock = threading.Lock() + self.closed = False + self.sequence = 0 + + def _acquire_pod(self) -> RunPodPod: + return RunPodPod(id="pod-0", name="pod-0") + + def _release_pod(self, pod: RunPodPod, reusable: bool) -> None: + pass + + def _ensure_pod_ready(self, pod: RunPodPod) -> None: + pod.ready = True + + def _run_workspace_on_pod(self, pod: RunPodPod, workspace: Path, command: list[str], env: dict[str, str], timeout_sec: int, label: str) -> RemoteExecutionResult: + # Simulate: stdout present but .exit_code is empty (missing) + return RemoteExecutionResult(stdout="some output\n", stderr="", returncode=1, elapsed_sec=1.0) + + config = TTTAutoResearchConfig(execution_backend="runpod", runpod_retry_limit=1).normalized(Path(".")) + pool = FakePool(config) + with tempfile.TemporaryDirectory() as tmpdir: + workspace = Path(tmpdir) + (workspace / "train.py").write_text("print('ok')\n", encoding="utf-8") + result = pool.execute_workspace(workspace=workspace, command=["python", "train.py"], env={}, timeout_sec=10, label="candidate") + # The key assertion: missing .exit_code should NOT silently become returncode=0 + self.assertEqual(result.returncode, 1) + + def test_cleanup_orphaned_pods_on_init(self) -> None: + deleted_ids: list[str] = [] + + class FakeClient: + def delete_pod(self, pod_id: str) -> None: + deleted_ids.append(pod_id) + + with tempfile.TemporaryDirectory() as tmpdir: + run_dir = Path(tmpdir) + pool_state = [ + 
{"id": "orphan-aaa", "name": "orphan-0"}, + {"id": "orphan-bbb", "name": "orphan-1"}, + ] + (run_dir / "runpod_pool.json").write_text(json.dumps(pool_state), encoding="utf-8") + + config = TTTAutoResearchConfig(execution_backend="runpod", runpod_retry_limit=1).normalized(Path(".")) + # Build a minimal pool manually to test _cleanup_orphaned_pods in isolation + pool = object.__new__(RunPodPool) + pool.run_dir = run_dir + pool.config = config + pool.client = FakeClient() + pool._cleanup_orphaned_pods() + + self.assertEqual(sorted(deleted_ids), ["orphan-aaa", "orphan-bbb"]) + self.assertFalse((run_dir / "runpod_pool.json").exists()) + + def test_validate_ssh_key_rejects_missing_key(self) -> None: + config = TTTAutoResearchConfig( + execution_backend="runpod", + runpod_ssh_private_key_path="/nonexistent/path/to/key", + ).normalized(Path(".")) + pool = object.__new__(RunPodPool) + pool.config = config + with self.assertRaises(RunPodError): + pool._validate_ssh_key() + + def test_validate_ssh_key_accepts_existing_key(self) -> None: + with tempfile.NamedTemporaryFile(suffix=".pem", delete=False) as tmp: + tmp.write(b"fake-key-content") + key_path = tmp.name + try: + config = TTTAutoResearchConfig( + execution_backend="runpod", + runpod_ssh_private_key_path=key_path, + ).normalized(Path(".")) + pool = object.__new__(RunPodPool) + pool.config = config + # Should not raise + pool._validate_ssh_key() + finally: + os.unlink(key_path) + + +if __name__ == "__main__": + unittest.main() diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py index 34559926..a72a4ce7 100644 --- a/ttt_autoresearch/cli.py +++ b/ttt_autoresearch/cli.py @@ -41,50 +41,60 @@ def main(argv: list[str] | None = None) -> int: run_dir = Path(config.run_dir) runner = AutoResearchRunner(repo_root=repo_root, config=config, run_dir=run_dir) - bootstrap = runner.build_bootstrap(baseline_val_bpb=float("inf")) - baseline_result = runner.run_baseline(bootstrap=bootstrap) - if baseline_result.val_bpb is None: - 
parser.error(f"Baseline run failed with status={baseline_result.status}. Check {baseline_result.stdout_path} and {baseline_result.stderr_path}.") - - bootstrap = runner.build_bootstrap(baseline_val_bpb=baseline_result.val_bpb) - runner.initialize_best_from_baseline(baseline_result, bootstrap.baseline_train_py) - AutoResearchDiscoverEnv.configure(bootstrap) - AutoResearchRewardEvaluator.configure(bootstrap, runner) - write_resolved_config(run_dir / "resolved_config.json", config) - - dataset_config = DatasetConfig( - env_type=AutoResearchDiscoverEnv, - problem_type="autoresearch", - batch_size=config.groups_per_step, - group_size=config.samples_per_step, - model_name_for_tokenizer=config.local_model_path or config.model_name, - renderer_name=config.renderer_name, - num_cpus_per_task=config.num_cpus_per_task, - eval_timeout=config.eval_timeout, - log_path=str(bootstrap.discover_log_dir), - ) - dataset_builder = get_single_problem_dataset_builder(dataset_config) - # Keep discover's RL recipe unchanged and only swap in the autoresearch task surface. 
- rl_config = RLConfig( - env_type=AutoResearchDiscoverEnv, - problem_type="autoresearch", - learning_rate=config.learning_rate, - dataset_builder=dataset_builder, - model_name=config.model_name, - num_epochs=config.max_steps, - temperature=config.temperature, - lora_rank=config.lora_rank, - wandb_project=config.wandb_project, - wandb_name=config.experiment_name, - log_path=str(bootstrap.discover_log_dir), - kl_penalty_coef=config.kl_penalty_coef, - save_every=config.save_every, - remove_constant_reward_groups=True, - phase1_max_tokens=config.phase1_max_tokens, - local_model_path=config.local_model_path, - ) - asyncio.run(discover_main(rl_config)) - return 0 + try: + baseline_result = runner.load_existing_baseline_result() + if baseline_result is None or baseline_result.val_bpb is None: + bootstrap = runner.build_bootstrap(baseline_val_bpb=float("inf")) + baseline_result = runner.run_baseline(bootstrap=bootstrap) + if baseline_result.val_bpb is None: + parser.error(f"Baseline run failed with status={baseline_result.status}. 
Check {baseline_result.stdout_path} and {baseline_result.stderr_path}.") + + bootstrap = runner.build_bootstrap(baseline_val_bpb=baseline_result.val_bpb) + runner.initialize_best_from_baseline(baseline_result, bootstrap.baseline_train_py) + AutoResearchDiscoverEnv.configure(bootstrap) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + write_resolved_config(run_dir / "resolved_config.json", config) + + dataset_config = DatasetConfig( + env_type=AutoResearchDiscoverEnv, + problem_type="autoresearch", + batch_size=config.groups_per_step, + group_size=config.samples_per_step, + model_name_for_tokenizer=config.local_model_path or config.model_name, + renderer_name=config.renderer_name, + num_cpus_per_task=config.num_cpus_per_task, + eval_timeout=config.eval_timeout, + log_path=str(bootstrap.discover_log_dir), + ) + dataset_builder = get_single_problem_dataset_builder(dataset_config) + # Keep discover's RL recipe unchanged and only swap in the autoresearch task surface. + rl_config = RLConfig( + env_type=AutoResearchDiscoverEnv, + problem_type="autoresearch", + learning_rate=config.learning_rate, + dataset_builder=dataset_builder, + model_name=config.model_name, + num_epochs=config.max_steps, + temperature=config.temperature, + lora_rank=config.lora_rank, + adv_estimator="entropic_adaptive_beta", + adv_estimator_beta=2.0, + wandb_project=config.wandb_project, + wandb_name=config.experiment_name, + log_path=str(bootstrap.discover_log_dir), + kl_penalty_coef=config.kl_penalty_coef, + loss_fn="importance_sampling", + num_substeps=1, + save_every=config.save_every, + load_checkpoint_path=None, + remove_constant_reward_groups=True, + phase1_max_tokens=config.phase1_max_tokens, + local_model_path=config.local_model_path, + ) + asyncio.run(discover_main(rl_config)) + return 0 + finally: + runner.close() def _resolve_config_path(config_arg: str, repo_root: Path) -> Path: diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 1ad826d7..e6fdb548 
100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -22,9 +22,10 @@ @dataclass(slots=True) class TTTAutoResearchConfig: - model_name: str = "Qwen/Qwen3.5-35B-A3B" + model_name: str = "openai/gpt-oss-120b" provider: str | None = None api_base: str | None = None + target_val_bpb: float | None = 0.85 max_steps: int = 12 groups_per_step: int = 2 samples_per_step: int = 8 @@ -46,16 +47,43 @@ class TTTAutoResearchConfig: eval_timeout: int | None = None local_model_path: str | None = None keep_history: int = 6 - max_concurrent_evaluations: int = 1 + max_concurrent_evaluations: int = 16 gpu_devices: list[str] | None = None + execution_backend: str = "runpod" + runpod_api_key_env: str = "RUNPOD_API_KEY" + runpod_api_base: str = "https://rest.runpod.io/v1" + runpod_cloud_type: str = "COMMUNITY" + runpod_interruptible: bool = True + runpod_gpu_type_ids: list[str] | None = None + runpod_template_id: str | None = None + runpod_image_name: str | None = "runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04" + runpod_name_prefix: str = "autoresearch-ttt" + runpod_support_public_ip: bool = True + runpod_ports: list[str] | None = None + runpod_container_disk_gb: int = 50 + runpod_volume_gb: int = 0 + runpod_volume_mount_path: str = "/workspace" + runpod_ssh_user: str = "root" + runpod_ssh_private_key_path: str | None = None + runpod_repo_root: str = "/workspace/autoresearch" + runpod_prepare_num_shards: int = 10 + runpod_bootstrap_timeout_sec: int = 7200 + runpod_retry_limit: int = 3 + runpod_poll_interval_sec: int = 5 + runpod_bootstrap_commands: list[str] | None = None + runpod_terminate_on_close: bool = True def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": run_dir = _resolve_path(self.run_dir, repo_root) if self.run_dir else repo_root / "runs" / datetime.now().strftime("%Y%m%d_%H%M%S") experiment_name = self.experiment_name or run_dir.name + execution_backend = self.execution_backend.lower() + if execution_backend not in 
{"local", "runpod"}: + raise ValueError("execution_backend must be either 'local' or 'runpod'.") return TTTAutoResearchConfig( model_name=self.model_name, provider=self.provider, api_base=self.api_base, + target_val_bpb=self.target_val_bpb, max_steps=self.max_steps, groups_per_step=max(1, int(self.groups_per_step)), samples_per_step=self.samples_per_step, @@ -79,6 +107,29 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": keep_history=self.keep_history, max_concurrent_evaluations=max(1, int(self.max_concurrent_evaluations)), gpu_devices=_normalize_string_list(self.gpu_devices), + execution_backend=execution_backend, + runpod_api_key_env=self.runpod_api_key_env, + runpod_api_base=self.runpod_api_base.rstrip("/"), + runpod_cloud_type=self.runpod_cloud_type.upper(), + runpod_interruptible=bool(self.runpod_interruptible), + runpod_gpu_type_ids=_normalize_string_list(self.runpod_gpu_type_ids) or ["NVIDIA H100 PCIe"], + runpod_template_id=self.runpod_template_id, + runpod_image_name=self.runpod_image_name, + runpod_name_prefix=self.runpod_name_prefix, + runpod_support_public_ip=bool(self.runpod_support_public_ip), + runpod_ports=_normalize_string_list(self.runpod_ports) or ["22/tcp"], + runpod_container_disk_gb=max(20, int(self.runpod_container_disk_gb)), + runpod_volume_gb=max(0, int(self.runpod_volume_gb)), + runpod_volume_mount_path=self.runpod_volume_mount_path, + runpod_ssh_user=self.runpod_ssh_user, + runpod_ssh_private_key_path=_resolve_optional_path_str(self.runpod_ssh_private_key_path, repo_root), + runpod_repo_root=self.runpod_repo_root.rstrip("/"), + runpod_prepare_num_shards=max(2, int(self.runpod_prepare_num_shards)), + runpod_bootstrap_timeout_sec=max(300, int(self.runpod_bootstrap_timeout_sec)), + runpod_retry_limit=max(1, int(self.runpod_retry_limit)), + runpod_poll_interval_sec=max(1, int(self.runpod_poll_interval_sec)), + runpod_bootstrap_commands=_normalize_command(self.runpod_bootstrap_commands), + 
runpod_terminate_on_close=bool(self.runpod_terminate_on_close), ) def to_dict(self) -> dict[str, Any]: diff --git a/ttt_autoresearch/env.py b/ttt_autoresearch/env.py index b53e746f..63c2e08e 100644 --- a/ttt_autoresearch/env.py +++ b/ttt_autoresearch/env.py @@ -1,6 +1,5 @@ from __future__ import annotations -from dataclasses import dataclass from pathlib import Path import asyncio import json @@ -8,24 +7,10 @@ from ttt_autoresearch.config import BootstrapContext from ttt_autoresearch.discover_compat import Environment, State, VerifyResult +from ttt_autoresearch.prompt_builder import build_rollout_prompt from ttt_autoresearch.reward import AutoResearchRewardEvaluator from ttt_autoresearch.runner import parse_patch_candidate - -def read_recent_history(history_path: Path, limit: int) -> list[dict[str, Any]]: - if not history_path.exists(): - return [] - entries: list[dict[str, Any]] = [] - for line in history_path.read_text(encoding="utf-8").splitlines(): - if not line.strip(): - continue - try: - entries.append(json.loads(line)) - except json.JSONDecodeError: - continue - return entries[-limit:] - - class AutoResearchState(State): def __init__( self, @@ -39,9 +24,6 @@ def __init__( observation: str = "", baseline_val_bpb: float | None = None, current_best_val_bpb: float | None = None, - history: list[dict[str, Any]] | None = None, - summary: str = "", - rationale: str = "", raw_score: float | None = None, ) -> None: super().__init__( @@ -56,9 +38,6 @@ def __init__( ) self.baseline_val_bpb = baseline_val_bpb self.current_best_val_bpb = current_best_val_bpb - self.history = history or [] - self.summary = summary - self.rationale = rationale self.raw_score = raw_score @property @@ -69,6 +48,45 @@ def step(self) -> int: def current_train_py(self) -> str: return self.code + def to_prompt(self, target: float, metric_name: str = "value", maximize: bool = True, language: str = "") -> str: + value_ctx = f"You are iteratively optimizing {metric_name}." 
+ improvement_direction = "higher" if maximize else "lower" + + has_code = self.code and self.code.strip() + if has_code: + value_ctx += "\nHere is the last code we ran:\n" + if language: + value_ctx += f"```{language}\n{self.code}\n```" + else: + value_ctx += self.code + else: + value_ctx += "\nNo previous code available." + + if self.parent_values and self.value is not None: + before_value = self.parent_values[0] if maximize else -self.parent_values[0] + after_value = self.value if maximize else -self.value + current_gap = target - after_value if maximize else after_value - target + value_ctx += ( + f"\nHere is the {metric_name} before and after running the code above ({improvement_direction} is better): " + f"{before_value:.6f} -> {after_value:.6f}" + ) + value_ctx += f"\nTarget: {target}. Current gap: {current_gap:.6f}. Further improvements will also be generously rewarded." + elif self.value is not None: + after_value = self.value if maximize else -self.value + current_gap = target - after_value if maximize else after_value - target + value_ctx += f"\nCurrent {metric_name} ({improvement_direction} is better): {after_value:.6f}" + value_ctx += f"\nTarget: {target}. Current gap: {current_gap:.6f}. Further improvements will also be generously rewarded." 
+ else: + value_ctx += f"\nTarget {metric_name}: {target}" + + if self.observation and self.observation.strip(): + stdout = self.observation.strip() + if len(stdout) > 500: + stdout = "\n\n\t\t ...(TRUNCATED)...\n" + stdout[-500:] + value_ctx += f"\n\n--- Previous Program Output ---\n{stdout}\n--- End Output ---" + + return value_ctx + def to_dict(self) -> dict[str, Any]: payload = super().to_dict() payload.update( @@ -76,9 +94,6 @@ def to_dict(self) -> dict[str, Any]: "type": self.__class__.__name__, "baseline_val_bpb": self.baseline_val_bpb, "current_best_val_bpb": self.current_best_val_bpb, - "history": self.history, - "summary": self.summary, - "rationale": self.rationale, "raw_score": self.raw_score, } ) @@ -97,9 +112,6 @@ def from_dict(cls, data: dict[str, Any]) -> "AutoResearchState": observation=data.get("observation", ""), baseline_val_bpb=data.get("baseline_val_bpb"), current_best_val_bpb=data.get("current_best_val_bpb"), - history=data.get("history"), - summary=data.get("summary", ""), - rationale=data.get("rationale", ""), raw_score=data.get("raw_score"), ) @@ -145,9 +157,6 @@ def create_initial_state(cls, problem_type: str) -> AutoResearchState: observation=baseline_stdout, baseline_val_bpb=cls.bootstrap.baseline_val_bpb, current_best_val_bpb=current_best, - history=[], - summary="baseline", - rationale="seed state from the original autoresearch train.py", raw_score=current_best, ) @@ -165,45 +174,24 @@ def get_question(self) -> str: raise RuntimeError("AutoResearchDiscoverEnv is not configured.") state = self.initial_state - history = read_recent_history(self.bootstrap.history_path, self.bootstrap.config.keep_history) - if history: - history_text = "\n".join( - f"- [{entry['status']}] reward={entry['reward']:.6f} val_bpb={entry.get('val_bpb')} summary={entry['summary']}" - for entry in history - ) - else: - history_text = "- No prior candidate evaluations yet." - - return f"""{self.bootstrap.program_text} - -You are the outer autoresearch agent. 
Your only job is to improve train.py. -You may edit train.py only. Do not modify prepare.py, program.md, or any other file. -The reward comes from running train.py and measuring val_bpb. Lower val_bpb is better. - -Current best val_bpb: {state.current_best_val_bpb:.6f} -Baseline val_bpb: {state.baseline_val_bpb:.6f} - -Current best train.py: -```python -{state.current_train_py} -``` - -Recent accepted and rejected edits: -{history_text} - -Return exactly one ```json``` block with this schema: -{{ - "summary": "short description of the change", - "rationale": "why this should improve val_bpb", - "train_py": "the full replacement contents of train.py" -}} - -Rules: -- train_py must be the full file, not a diff. -- Only edit train.py. -- Keep the file runnable as a standalone script. -- Optimize for the lowest val_bpb under the existing time budget. -""" + target = self.bootstrap.config.target_val_bpb + if target is None: + target = state.current_best_val_bpb + state_ctx = state.to_prompt(target, metric_name="val_bpb", maximize=False, language="python") + return build_rollout_prompt( + state_ctx=state_ctx, + construction_section=( + "You may want to start your search from the current training script shown above.\n" + "This is the current starting point selected by the search procedure.\n" + "You are encouraged to explore meaningfully different directions if the current approach appears saturated." + ), + code_section=( + "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" + "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" + "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" + "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." 
+ ), + ) def check_format(self, parsed_code: str) -> bool: try: @@ -215,7 +203,7 @@ def check_format(self, parsed_code: str) -> bool: async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: if not self.check_format(parsed_code): return VerifyResult( - reward=-1.0, + reward=0.0, msg="Invalid candidate JSON.", correctness=0.0, raw_score=float(self.initial_state.current_best_val_bpb), @@ -238,16 +226,6 @@ async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: def _create_next_state(self, step_idx: int, parsed_code: str, outs: VerifyResult) -> AutoResearchState: candidate = parse_patch_candidate(parsed_code) - history_entry = { - "step": step_idx, - "summary": candidate.summary, - "rationale": candidate.rationale, - "reward": outs.reward, - "val_bpb": outs.raw_score, - "status": outs.metrics.get("candidate_status", "unknown"), - } - prior_history = list(getattr(self.initial_state, "history", [])) - next_history = (prior_history + [history_entry])[-10:] parent_best = self.initial_state.current_best_val_bpb new_best = min(parent_best, outs.raw_score) if outs.raw_score is not None else parent_best return AutoResearchState( @@ -258,9 +236,6 @@ def _create_next_state(self, step_idx: int, parsed_code: str, outs: VerifyResult observation=outs.stdout, baseline_val_bpb=self.initial_state.baseline_val_bpb, current_best_val_bpb=new_best, - history=next_history, - summary=candidate.summary, - rationale=candidate.rationale, raw_score=outs.raw_score, ) diff --git a/ttt_autoresearch/prompt_builder.py b/ttt_autoresearch/prompt_builder.py new file mode 100644 index 00000000..692bca63 --- /dev/null +++ b/ttt_autoresearch/prompt_builder.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +def build_rollout_prompt( + *, + state_ctx: str, + construction_section: str, + code_section: str, +) -> str: + return f"""You are an expert machine learning researcher and systems engineer optimizing a language-model training script. 
+ +Your task is to improve `train.py` so that it achieves a lower `val_bpb` under the fixed AutoResearch evaluation budget. + +## Problem + +Improve the `train.py` program so that the resulting training run achieves a lower validation bits-per-byte (`val_bpb`). + +Everything in `train.py` is fair game: +- architecture +- optimizer +- hyperparameters +- training loop +- batch size +- model size + +**Lower `val_bpb` values are better** - they indicate a stronger model under the fixed evaluation budget. + +## Budget & Resources +- **Time budget**: 5 minutes of wall-clock training time +- **Evaluation harness**: fixed AutoResearch runner +- **VRAM**: moderate increases are acceptable for meaningful gains, but avoid wasteful blowups + +## AutoResearch Invariants +- `prepare.py` and the evaluation protocol are fixed and cannot be changed +- Maximum sequence length is `2048` +- Validation uses the pinned shard `06542` +- The tokenizer / vocabulary setup is fixed at vocab size `8192` +- The training script must remain compatible with the existing BOS-aligned bin-packing data pipeline +- The model implementation must continue to support `forward(x, y, reduction='none')` + +## Rules +- You may only edit `train.py` +- Do not modify `prepare.py`, dependencies, or the evaluation harness +- Return exactly one ```json``` block with this schema: +{{ + "summary": "short description of the change", + "rationale": "why this should improve val_bpb", + "train_py": "the full replacement contents of train.py" +}} +- `train_py` must be the full file, not a diff +- Propose exactly one candidate for this rollout +- Optimize for the lowest `val_bpb` under the fixed time budget +- Prefer simpler changes when improvement is similar + +{state_ctx} +{construction_section} +{code_section} +""" diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py index d33f30dc..276a8514 100644 --- a/ttt_autoresearch/reward.py +++ b/ttt_autoresearch/reward.py @@ -13,17 +13,16 @@ _ARTIFACT_LOCK = 
threading.Lock() _EVALUATION_SLOTS: threading.BoundedSemaphore | None = None _GPU_DEVICE_QUEUE: queue.Queue[str] | None = None +_REWARD_EPSILON = 1e-8 +_FAIL_REWARD = 0.0 +_FAIL_RAW_SCORE = 1e9 -def reward_for_result(current_best_val_bpb: float, result: RunResult) -> tuple[float, float]: - if result.status == "timeout": - return -0.5, 0.0 - if result.status == "missing_metric": - return -0.75, 0.0 +def reward_for_result(result: RunResult) -> tuple[float, float]: if result.status != "success" or result.val_bpb is None: - return -1.0, 0.0 - reward = current_best_val_bpb - result.val_bpb - correctness = 1.0 if reward > 0 else 0.0 + return _FAIL_REWARD, 0.0 + reward = 1.0 / (_REWARD_EPSILON + result.val_bpb) + correctness = 1.0 return reward, correctness @@ -38,6 +37,8 @@ def configure(cls, bootstrap: BootstrapContext, runner: AutoResearchRunner) -> N cls.runner = runner _EVALUATION_SLOTS = threading.BoundedSemaphore(bootstrap.config.max_concurrent_evaluations) _GPU_DEVICE_QUEUE = None + if bootstrap.config.execution_backend == "runpod": + return gpu_devices = bootstrap.config.gpu_devices or [] if bootstrap.config.max_concurrent_evaluations > 1: if not gpu_devices: @@ -66,16 +67,15 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: try: candidate = parse_patch_candidate(code) except ValueError as exc: - return self._failure_payload( - reward=-1.0, - raw_score=self._current_best_from_state(state), - msg=f"Invalid candidate payload: {exc}", - status="invalid_candidate", + return self._persist_invalid_candidate( + code=code, + state=state, + error_message=f"Invalid candidate payload: {exc}", ) result = self._run_candidate(candidate, state) current_best = self._current_best_from_state(state) - reward, correctness = reward_for_result(current_best, result) + reward, correctness = reward_for_result(result) improved_global_best = False with _ARTIFACT_LOCK: @@ -93,7 +93,7 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "summary": 
candidate.summary, "rationale": candidate.rationale, "reward": reward, - "accepted": bool(correctness), + "accepted": bool(result.status == "success" and result.val_bpb is not None), "val_bpb": result.val_bpb, "parent_val_bpb": current_best, "stdout_path": str(result.stdout_path), @@ -102,10 +102,30 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "improved_global_best": improved_global_best, } self.runner.append_history(history_entry) + self.runner.write_rollout_manifest( + result.workspace_path, + { + "step": getattr(state, "timestep", -1) + 1, + "starting_state": state.to_dict() if hasattr(state, "to_dict") else { + "id": getattr(state, "id", "unknown"), + "timestep": getattr(state, "timestep", -1), + }, + "candidate": { + "summary": candidate.summary, + "rationale": candidate.rationale, + "train_py": candidate.train_py, + }, + "evaluation": result.to_dict(), + "reward": reward, + "correctness": correctness, + "message": self._build_message(candidate, result, current_best, reward), + "improved_global_best": improved_global_best, + }, + ) message = self._build_message(candidate, result, current_best, reward) stdout = self.runner.read_text(result.stdout_path) - raw_score = result.val_bpb if result.val_bpb is not None else current_best + raw_score = result.val_bpb if result.val_bpb is not None else _FAIL_RAW_SCORE return { "reward": float(reward), "msg": message, @@ -125,6 +145,70 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: }, } + def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) -> dict[str, Any]: + if self.runner is None: + raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + step = getattr(state, "timestep", -1) + 1 + state_id = getattr(state, "id", "unknown") + current_best = self._current_best_from_state(state) + with _ARTIFACT_LOCK: + workspace = self.runner.create_candidate_artifact_dir(step=step, prefix="invalid") + response_path = workspace / "response.txt" + 
response_path.write_text(code, encoding="utf-8") + metrics_path = workspace / "metrics.json" + self.runner.write_json_artifact( + metrics_path, + { + "candidate_status": "invalid_candidate", + "error": error_message, + }, + ) + history_entry = { + "step": step, + "state_id": state_id, + "status": "invalid_candidate", + "summary": "", + "rationale": "", + "reward": _FAIL_REWARD, + "accepted": False, + "val_bpb": None, + "parent_val_bpb": current_best, + "stdout_path": "", + "stderr_path": "", + "workspace_path": str(workspace), + "improved_global_best": False, + "error": error_message, + } + self.runner.append_history(history_entry) + self.runner.write_rollout_manifest( + workspace, + { + "step": step, + "starting_state": state.to_dict() if hasattr(state, "to_dict") else { + "id": state_id, + "timestep": getattr(state, "timestep", -1), + }, + "candidate": None, + "raw_response_path": str(response_path), + "raw_response": code, + "evaluation": { + "status": "invalid_candidate", + "workspace_path": str(workspace), + "metrics_path": str(metrics_path), + }, + "reward": _FAIL_REWARD, + "correctness": 0.0, + "message": error_message, + "improved_global_best": False, + }, + ) + return self._failure_payload( + reward=_FAIL_REWARD, + raw_score=_FAIL_RAW_SCORE, + msg=error_message, + status="invalid_candidate", + ) + def _run_candidate(self, candidate: PatchCandidate, state: Any) -> RunResult: if self.bootstrap is None or self.runner is None: raise RuntimeError("AutoResearchRewardEvaluator is not configured.") diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index 0fd7ef19..8c8e3436 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -13,6 +13,7 @@ from typing import Any from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig +from ttt_autoresearch.runpod import RunPodPool VAL_BPB_RE = re.compile(r"^val_bpb:\s*([-+]?(?:\d+\.?\d*|\.\d+)(?:[eE][-+]?\d+)?)", re.MULTILINE) @@ -82,6 +83,7 @@ def __init__(self, 
repo_root: Path, config: TTTAutoResearchConfig, run_dir: Path self.repo_root = repo_root self.config = config self.run_dir = run_dir + self._runpod_pool: RunPodPool | None = None self.run_dir.mkdir(parents=True, exist_ok=True) (self.run_dir / "baseline").mkdir(exist_ok=True) (self.run_dir / "candidates").mkdir(exist_ok=True) @@ -89,7 +91,7 @@ def __init__(self, repo_root: Path, config: TTTAutoResearchConfig, run_dir: Path def build_bootstrap(self, baseline_val_bpb: float) -> BootstrapContext: program_text = (self.repo_root / "program.md").read_text(encoding="utf-8") - baseline_train_py = (self.repo_root / "train.py").read_text(encoding="utf-8") + baseline_train_py = self._load_baseline_train_py() return BootstrapContext( repo_root=self.repo_root, run_dir=self.run_dir, @@ -99,9 +101,33 @@ def build_bootstrap(self, baseline_val_bpb: float) -> BootstrapContext: baseline_val_bpb=baseline_val_bpb, ) + def load_existing_baseline_result(self) -> RunResult | None: + baseline_path = self.run_dir / "baseline.json" + if not baseline_path.exists(): + return None + try: + payload = json.loads(baseline_path.read_text(encoding="utf-8")) + except json.JSONDecodeError: + return None + try: + return RunResult( + status=str(payload["status"]), + val_bpb=float(payload["val_bpb"]) if payload.get("val_bpb") is not None else None, + stdout_path=Path(payload["stdout_path"]), + stderr_path=Path(payload["stderr_path"]), + elapsed_sec=float(payload["elapsed_sec"]), + workspace_path=Path(payload["workspace_path"]), + metrics_path=Path(payload["metrics_path"]) if payload.get("metrics_path") else None, + command=[str(part) for part in payload.get("command", [])], + returncode=int(payload["returncode"]) if payload.get("returncode") is not None else None, + ) + except (KeyError, TypeError, ValueError): + return None + def run_baseline(self, bootstrap: BootstrapContext | None = None) -> RunResult: workspace = self.run_dir / "baseline" / "workspace" self._copy_repo(workspace) + (self.run_dir / 
"baseline" / "train.py").write_text((workspace / "train.py").read_text(encoding="utf-8"), encoding="utf-8") result = self._execute_workspace( workspace=workspace, command_template=self.config.baseline_command_override, @@ -132,6 +158,12 @@ def run_candidate( ) return result + def create_candidate_artifact_dir(self, step: int, prefix: str = "candidate") -> Path: + label = prefix.replace(" ", "_") + workspace = self.run_dir / "candidates" / f"{step:04d}_{label}_{uuid.uuid4().hex[:8]}" + workspace.mkdir(parents=True, exist_ok=False) + return workspace + def initialize_best_from_baseline(self, baseline_result: RunResult, train_py_text: str) -> None: if baseline_result.val_bpb is None: return @@ -173,6 +205,20 @@ def append_history(self, entry: dict[str, Any]) -> None: with history_path.open("a", encoding="utf-8") as handle: handle.write(json.dumps(entry, sort_keys=True) + "\n") + def write_rollout_manifest(self, workspace: Path, payload: dict[str, Any]) -> Path: + path = workspace / "rollout_manifest.json" + self._write_json(path, payload) + return path + + def write_json_artifact(self, path: Path, payload: dict[str, Any]) -> Path: + self._write_json(path, payload) + return path + + def close(self) -> None: + if self._runpod_pool is not None: + self._runpod_pool.close() + self._runpod_pool = None + def _copy_repo(self, workspace: Path) -> None: if workspace.exists(): shutil.rmtree(workspace) @@ -193,32 +239,51 @@ def _execute_workspace( ) -> RunResult: command = self._resolve_command(command_template, workspace, bootstrap, label, state_id) env = bootstrap.subprocess_env() if bootstrap else dict(os.environ) - if gpu_device is not None: + if gpu_device is not None and self.config.execution_backend == "local": env["CUDA_VISIBLE_DEVICES"] = gpu_device stdout_path = workspace / "stdout.log" stderr_path = workspace / "stderr.log" metrics_path = workspace / "metrics.json" start = time.time() - try: - proc = subprocess.run( - command, - cwd=workspace, + if 
self.config.execution_backend == "runpod": + pool = self._get_runpod_pool() + remote_result = pool.execute_workspace( + workspace=workspace, + command=command, env=env, - timeout=self.config.timeout_sec, - text=True, - capture_output=True, - check=False, + timeout_sec=self.config.timeout_sec, + label=label, ) - stdout = proc.stdout - stderr = proc.stderr - returncode = proc.returncode - status = "success" if returncode == 0 else "crash" - except subprocess.TimeoutExpired as exc: - stdout = exc.stdout or "" - stderr = exc.stderr or "" - returncode = None - status = "timeout" - elapsed_sec = time.time() - start + stdout = remote_result.stdout + stderr = remote_result.stderr + returncode = remote_result.returncode + elapsed_sec = remote_result.elapsed_sec + if returncode == 124: + status = "timeout" + returncode = None + else: + status = "success" if returncode == 0 else "crash" + else: + try: + proc = subprocess.run( + command, + cwd=workspace, + env=env, + timeout=self.config.timeout_sec, + text=True, + capture_output=True, + check=False, + ) + stdout = proc.stdout + stderr = proc.stderr + returncode = proc.returncode + status = "success" if returncode == 0 else "crash" + except subprocess.TimeoutExpired as exc: + stdout = exc.stdout or "" + stderr = exc.stderr or "" + returncode = None + status = "timeout" + elapsed_sec = time.time() - start stdout_path.write_text(stdout, encoding="utf-8") stderr_path.write_text(stderr, encoding="utf-8") @@ -244,6 +309,11 @@ def _execute_workspace( returncode=returncode, ) + def _get_runpod_pool(self) -> RunPodPool: + if self._runpod_pool is None: + self._runpod_pool = RunPodPool(repo_root=self.repo_root, run_dir=self.run_dir, config=self.config) + return self._runpod_pool + def _read_val_bpb(self, stdout: str, metrics_path: Path) -> float | None: direct = parse_val_bpb(stdout) if direct is not None: @@ -291,3 +361,13 @@ def read_text(path: Path, max_chars: int = 4000) -> str: @staticmethod def _write_json(path: Path, payload: 
dict[str, Any]) -> None: path.write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + + def _load_baseline_train_py(self) -> str: + for candidate in ( + self.run_dir / "baseline" / "train.py", + self.run_dir / "baseline" / "workspace" / "train.py", + self.repo_root / "train.py", + ): + if candidate.exists(): + return candidate.read_text(encoding="utf-8") + raise FileNotFoundError("Could not locate baseline train.py in either the run directory or repo root.") diff --git a/ttt_autoresearch/runpod.py b/ttt_autoresearch/runpod.py new file mode 100644 index 00000000..fbba5f9a --- /dev/null +++ b/ttt_autoresearch/runpod.py @@ -0,0 +1,616 @@ +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +import json +import os +import queue +import shlex +import subprocess +import tarfile +import tempfile +import threading +import time +import urllib.error +import urllib.parse +import urllib.request +import uuid + +from ttt_autoresearch.config import TTTAutoResearchConfig + + +class RunPodError(RuntimeError): + pass + + +class RunPodAPIError(RunPodError): + pass + + +class RunPodPodLostError(RunPodError): + pass + + +@dataclass(slots=True) +class RemoteExecutionResult: + stdout: str + stderr: str + returncode: int | None + elapsed_sec: float + + +@dataclass(slots=True) +class RunPodPod: + id: str + name: str + public_ip: str | None = None + ssh_port: int | None = None + desired_status: str | None = None + machine_id: str | None = None + ready: bool = False + + +class RunPodAPIClient: + def __init__(self, config: TTTAutoResearchConfig) -> None: + api_key = os.environ.get(config.runpod_api_key_env) + if not api_key: + raise RunPodAPIError( + f"{config.runpod_api_key_env} is not set. Export your RunPod API key before starting a run." 
+ ) + self.api_key = api_key + self.base_url = config.runpod_api_base.rstrip("/") + + def list_pods(self) -> list[dict[str, object]]: + payload = self._request("GET", "/pods") + pods = payload.get("pods", payload) + if isinstance(pods, list): + return [pod for pod in pods if isinstance(pod, dict)] + raise RunPodAPIError(f"Unexpected /pods response: {payload!r}") + + def create_pod(self, body: dict[str, object]) -> dict[str, object]: + payload = self._request("POST", "/pods", body) + if isinstance(payload, dict): + return payload + raise RunPodAPIError(f"Unexpected create pod response: {payload!r}") + + def delete_pod(self, pod_id: str) -> None: + try: + self._request("DELETE", f"/pods/{pod_id}") + except RunPodAPIError as exc: + message = str(exc) + if "404" in message: + return + raise + + def _request(self, method: str, path: str, body: dict[str, object] | None = None) -> dict[str, object]: + url = f"{self.base_url}{path}" + data = None + headers = { + "Authorization": f"Bearer {self.api_key}", + "Accept": "application/json", + } + if body is not None: + data = json.dumps(body).encode("utf-8") + headers["Content-Type"] = "application/json" + request = urllib.request.Request(url, method=method, data=data, headers=headers) + try: + with urllib.request.urlopen(request, timeout=60) as response: + raw = response.read().decode("utf-8") or "{}" + except urllib.error.HTTPError as exc: + detail = exc.read().decode("utf-8", errors="replace") + raise RunPodAPIError(f"{method} {path} failed with HTTP {exc.code}: {detail}") from exc + except urllib.error.URLError as exc: + raise RunPodAPIError(f"{method} {path} failed: {exc}") from exc + try: + parsed = json.loads(raw) + except json.JSONDecodeError as exc: + raise RunPodAPIError(f"{method} {path} returned invalid JSON: {raw!r}") from exc + if isinstance(parsed, dict): + return parsed + raise RunPodAPIError(f"{method} {path} returned unexpected payload: {parsed!r}") + + +class RunPodPool: + def __init__(self, repo_root: Path, 
run_dir: Path, config: TTTAutoResearchConfig) -> None: + self.repo_root = repo_root + self.run_dir = run_dir + self.config = config + self.client = RunPodAPIClient(config) + self.lock = threading.Lock() + self.available: queue.Queue[RunPodPod] = queue.Queue() + self.created_pods: dict[str, RunPodPod] = {} + self.repo_archive_path = self.run_dir / "runpod_repo_bundle.tar.gz" + self.repo_archive_lock = threading.Lock() + self.closed = False + self.sequence = 0 + self._validate_ssh_key() + self._cleanup_orphaned_pods() + self._write_repo_archive() + + def _validate_ssh_key(self) -> None: + key_path = self.config.runpod_ssh_private_key_path + if key_path: + if not Path(key_path).exists(): + raise RunPodError(f"SSH private key not found at {key_path}") + return + # No explicit key — check that the system default exists. + default_keys = [Path.home() / ".ssh" / name for name in ("id_ed25519", "id_rsa", "id_ecdsa")] + has_agent = bool(os.environ.get("SSH_AUTH_SOCK")) + has_default = any(key.exists() for key in default_keys) + if not has_agent and not has_default: + raise RunPodError( + "No SSH private key configured for RunPod. " + "Set runpod_ssh_private_key_path in your config, or ensure a default key " + "exists at ~/.ssh/id_ed25519 (or id_rsa), or run an ssh-agent." 
+ ) + + def _cleanup_orphaned_pods(self) -> None: + pool_state_path = self.run_dir / "runpod_pool.json" + if not pool_state_path.exists(): + return + try: + entries = json.loads(pool_state_path.read_text(encoding="utf-8")) + except (json.JSONDecodeError, OSError): + return + if not isinstance(entries, list): + return + orphan_ids = [str(entry["id"]) for entry in entries if isinstance(entry, dict) and "id" in entry] + if not orphan_ids: + return + cleaned = 0 + for pod_id in orphan_ids: + try: + self.client.delete_pod(pod_id) + cleaned += 1 + except RunPodAPIError: + pass + if cleaned: + print(f"[RunPodPool] Cleaned up {cleaned} orphaned pod(s) from a previous run.") + pool_state_path.unlink(missing_ok=True) + + def execute_workspace( + self, + workspace: Path, + command: list[str], + env: dict[str, str], + timeout_sec: int, + label: str, + ) -> RemoteExecutionResult: + last_error: Exception | None = None + for _ in range(self.config.runpod_retry_limit): + pod = self._acquire_pod() + reusable = True + try: + self._ensure_pod_ready(pod) + result = self._run_workspace_on_pod(pod, workspace, command, env, timeout_sec, label) + self._release_pod(pod, reusable=True) + return result + except RunPodPodLostError as exc: + reusable = False + last_error = exc + self._release_pod(pod, reusable=False) + except Exception: + self._release_pod(pod, reusable=reusable) + raise + raise RunPodPodLostError(f"RunPod spot pod was interrupted too many times while running {label}.") from last_error + + def close(self) -> None: + with self.lock: + if self.closed: + return + self.closed = True + pods = list(self.created_pods.values()) + self.created_pods.clear() + if not self.config.runpod_terminate_on_close: + return + for pod in pods: + try: + self.client.delete_pod(pod.id) + except RunPodAPIError: + continue + + def _acquire_pod(self) -> RunPodPod: + try: + return self.available.get_nowait() + except queue.Empty: + pass + + with self.lock: + if self.closed: + raise RunPodError("RunPod 
pool is closed.") + if len(self.created_pods) < self.config.max_concurrent_evaluations: + pod = self._create_pod() + self.created_pods[pod.id] = pod + self._write_pool_state() + return pod + + return self.available.get() + + def _release_pod(self, pod: RunPodPod, reusable: bool) -> None: + if not reusable: + with self.lock: + self.created_pods.pop(pod.id, None) + self._write_pool_state() + try: + self.client.delete_pod(pod.id) + except RunPodAPIError: + pass + return + if not self.closed: + self.available.put(pod) + + def _create_pod(self) -> RunPodPod: + ordinal = self.sequence + self.sequence += 1 + pod_name = f"{self.config.runpod_name_prefix}-{Path(self.run_dir).name}-{ordinal:02d}" + body: dict[str, object] = { + "name": pod_name, + "cloudType": self.config.runpod_cloud_type, + "interruptible": self.config.runpod_interruptible, + "supportPublicIp": self.config.runpod_support_public_ip, + "ports": self.config.runpod_ports, + "containerDiskInGb": self.config.runpod_container_disk_gb, + } + if self.config.runpod_volume_gb > 0: + body["volumeInGb"] = self.config.runpod_volume_gb + body["volumeMountPath"] = self.config.runpod_volume_mount_path + if self.config.runpod_template_id: + body["templateId"] = self.config.runpod_template_id + else: + body["imageName"] = self.config.runpod_image_name + body["gpuTypeIds"] = self.config.runpod_gpu_type_ids + payload = self.client.create_pod(body) + pod = self._pod_from_payload(payload) + if pod.id == "": + raise RunPodAPIError(f"Could not parse pod id from create response: {payload!r}") + return pod + + def _ensure_pod_ready(self, pod: RunPodPod) -> None: + refreshed = self._wait_for_pod_network(pod.id) + pod.public_ip = refreshed.public_ip + pod.ssh_port = refreshed.ssh_port + pod.desired_status = refreshed.desired_status + pod.machine_id = refreshed.machine_id + self._wait_for_ssh(pod) + if pod.ready: + return + self._bootstrap_pod(pod) + pod.ready = True + + def _wait_for_pod_network(self, pod_id: str) -> RunPodPod: + 
deadline = time.time() + self.config.runpod_bootstrap_timeout_sec + while time.time() < deadline: + payload = self._lookup_pod(pod_id) + if payload is None: + raise RunPodPodLostError(f"Pod {pod_id} disappeared before it became ready.") + pod = self._pod_from_payload(payload) + if pod.public_ip and pod.ssh_port: + return pod + time.sleep(self.config.runpod_poll_interval_sec) + raise RunPodPodLostError(f"Pod {pod_id} never exposed SSH before the bootstrap timeout.") + + def _wait_for_ssh(self, pod: RunPodPod) -> None: + deadline = time.time() + self.config.runpod_bootstrap_timeout_sec + while time.time() < deadline: + try: + completed = self._run_ssh(pod, "true", timeout=30, check=False) + except RunPodPodLostError: + raise + if completed.returncode == 0: + return + time.sleep(self.config.runpod_poll_interval_sec) + raise RunPodPodLostError(f"Pod {pod.id} exposed a public IP but never accepted SSH.") + + def _bootstrap_pod(self, pod: RunPodPod) -> None: + remote_archive = "/tmp/autoresearch_repo_bundle.tar.gz" + self._upload_file(pod, self.repo_archive_path, remote_archive) + repo_root = self.config.runpod_repo_root + bootstrap_commands = self.config.runpod_bootstrap_commands or [ + "python3 -m pip install --upgrade uv", + "cd {repo_root} && uv sync", + "cd {repo_root} && uv run prepare.py --num-shards {prepare_num_shards}", + ] + rendered = [ + command.format( + repo_root=repo_root, + prepare_num_shards=self.config.runpod_prepare_num_shards, + ) + for command in bootstrap_commands + ] + script_lines = [ + "set -euo pipefail", + f"rm -rf {shlex.quote(repo_root)}", + f"mkdir -p {shlex.quote(repo_root)}", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)} --strip-components=1", + ] + script_lines.extend(rendered) + self._run_ssh( + pod, + "\n".join(script_lines), + timeout=self.config.runpod_bootstrap_timeout_sec, + check=True, + ) + + def _run_workspace_on_pod( + self, + pod: RunPodPod, + workspace: Path, + command: list[str], + env: dict[str, 
str], + timeout_sec: int, + label: str, + ) -> RemoteExecutionResult: + remote_workspace = f"{self.config.runpod_repo_root}/../jobs/{label}-{uuid.uuid4().hex[:8]}" + remote_archive = f"/tmp/{uuid.uuid4().hex}.tar.gz" + local_archive = self._build_workspace_archive(workspace) + try: + self._upload_file(pod, local_archive, remote_archive) + env_lines = [] + for key in sorted(env): + if key.startswith("RUNPOD_"): + continue + env_lines.append(f"export {key}={shlex.quote(env[key])}") + env_lines.append("export PYTHONUNBUFFERED=1") + command_str = " ".join(shlex.quote(part) for part in command) + script = "\n".join( + [ + "set -uo pipefail", + f"rm -rf {shlex.quote(remote_workspace)}", + f"mkdir -p {shlex.quote(remote_workspace)}", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)} --strip-components=1", + f"cd {shlex.quote(remote_workspace)}", + *env_lines, + f"timeout --kill-after=30s {timeout_sec}s {command_str} > stdout.log 2> stderr.log", + "rc=$?", + 'printf "%s" "$rc" > .exit_code', + "exit 0", + ] + ) + start = time.time() + self._run_ssh(pod, script, timeout=timeout_sec + 180, check=True) + elapsed = time.time() - start + stdout = self._download_text_file(pod, f"{remote_workspace}/stdout.log") + stderr = self._download_text_file(pod, f"{remote_workspace}/stderr.log") + exit_text = self._download_text_file(pod, f"{remote_workspace}/.exit_code").strip() + # If .exit_code is missing or empty, assume the process crashed rather + # than silently treating it as success (returncode=0). 
+ if exit_text: + try: + returncode = int(exit_text) + except ValueError: + returncode = 1 + else: + returncode = 1 + metrics_json = self._download_text_file(pod, f"{remote_workspace}/metrics.json") + if metrics_json: + metrics_dest = workspace / "metrics.json" + metrics_dest.write_text(metrics_json, encoding="utf-8") + return RemoteExecutionResult( + stdout=stdout, + stderr=stderr, + returncode=returncode, + elapsed_sec=elapsed, + ) + finally: + try: + self._run_ssh( + pod, + "\n".join( + [ + "set -e", + f"rm -rf {shlex.quote(remote_workspace)}", + f"rm -f {shlex.quote(remote_archive)}", + ] + ), + timeout=60, + check=False, + ) + except RunPodPodLostError: + pass + local_archive.unlink(missing_ok=True) + + def _upload_file(self, pod: RunPodPod, local_path: Path, remote_path: str) -> None: + destination = f"{self.config.runpod_ssh_user}@{pod.public_ip}:{remote_path}" + try: + completed = subprocess.run( + self._scp_base_args(pod) + [str(local_path), destination], + text=True, + capture_output=True, + timeout=600, + check=False, + ) + except subprocess.TimeoutExpired as exc: + if self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} disappeared while uploading {local_path.name}.") from exc + raise RunPodError(f"Timed out uploading {local_path.name} to pod {pod.id}.") from exc + if completed.returncode != 0: + if self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} disappeared while uploading {local_path.name}.") + raise RunPodError(completed.stderr.strip() or f"scp upload to {pod.id} failed.") + + def _download_text_file(self, pod: RunPodPod, remote_path: str) -> str: + with tempfile.TemporaryDirectory() as tmpdir: + local_path = Path(tmpdir) / Path(remote_path).name + source = f"{self.config.runpod_ssh_user}@{pod.public_ip}:{remote_path}" + try: + completed = subprocess.run( + self._scp_base_args(pod) + [source, str(local_path)], + text=True, + capture_output=True, + timeout=600, + check=False, + ) + except subprocess.TimeoutExpired 
as exc: + if self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} disappeared while downloading artifacts.") from exc + raise RunPodError(f"Timed out downloading {remote_path} from pod {pod.id}.") from exc + if completed.returncode != 0: + if self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} disappeared while downloading artifacts.") + return "" + return local_path.read_text(encoding="utf-8") + + def _run_ssh(self, pod: RunPodPod, script: str, timeout: int, check: bool) -> subprocess.CompletedProcess[str]: + try: + completed = subprocess.run( + self._ssh_base_args(pod) + ["bash", "-lc", script], + text=True, + capture_output=True, + timeout=timeout, + check=False, + ) + except subprocess.TimeoutExpired as exc: + if self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} was interrupted during remote execution.") from exc + raise RunPodError(f"Timed out waiting for remote SSH command on pod {pod.id}.") from exc + if completed.returncode == 255 and self._pod_missing(pod.id): + raise RunPodPodLostError(f"Pod {pod.id} was interrupted during remote execution.") + if check and completed.returncode != 0: + raise RunPodError(completed.stderr.strip() or completed.stdout.strip() or f"Remote command failed on pod {pod.id}.") + return completed + + def _ssh_base_args(self, pod: RunPodPod) -> list[str]: + if not pod.public_ip or pod.ssh_port is None: + raise RunPodPodLostError(f"Pod {pod.id} does not have a reachable SSH endpoint.") + args = [ + "ssh", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "ConnectTimeout=10", + ] + if self.config.runpod_ssh_private_key_path: + args.extend(["-i", self.config.runpod_ssh_private_key_path]) + args.extend( + [ + "-p", + str(pod.ssh_port), + f"{self.config.runpod_ssh_user}@{pod.public_ip}", + ] + ) + return args + + def _scp_base_args(self, pod: RunPodPod) -> list[str]: + if not pod.public_ip or pod.ssh_port is None: + raise 
RunPodPodLostError(f"Pod {pod.id} does not have a reachable SSH endpoint.") + args = [ + "scp", + "-O", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "ConnectTimeout=10", + ] + if self.config.runpod_ssh_private_key_path: + args.extend(["-i", self.config.runpod_ssh_private_key_path]) + args.extend(["-P", str(pod.ssh_port)]) + return args + + def _lookup_pod(self, pod_id: str) -> dict[str, object] | None: + for payload in self.client.list_pods(): + if str(payload.get("id", "")) == pod_id: + return payload + return None + + def _pod_missing(self, pod_id: str) -> bool: + return self._lookup_pod(pod_id) is None + + def _write_repo_archive(self) -> None: + with self.repo_archive_lock: + with tarfile.open(self.repo_archive_path, "w:gz") as archive: + for path in self.repo_root.rglob("*"): + if not path.is_file(): + continue + rel = path.relative_to(self.repo_root) + if self._should_skip(rel): + continue + archive.add(path, arcname=str(rel)) + + def _build_workspace_archive(self, workspace: Path) -> Path: + fd, archive_path = tempfile.mkstemp(prefix="workspace_", suffix=".tar.gz") + os.close(fd) + archive_file = Path(archive_path) + with tarfile.open(archive_file, "w:gz") as archive: + for path in workspace.rglob("*"): + if not path.is_file(): + continue + rel = path.relative_to(workspace) + if self._should_skip(rel): + continue + archive.add(path, arcname=str(rel)) + return archive_file + + @staticmethod + def _should_skip(rel: Path) -> bool: + parts = rel.parts + if not parts: + return False + if parts[0] in {".git", "runs", "__pycache__", ".pytest_cache", ".venv"}: + return True + return rel.suffix in {".pyc", ".pyo"} + + def _write_pool_state(self) -> None: + payload = [ + { + "id": pod.id, + "name": pod.name, + "public_ip": pod.public_ip, + "ssh_port": pod.ssh_port, + "desired_status": pod.desired_status, + "machine_id": pod.machine_id, + "ready": pod.ready, + } + for pod in 
self.created_pods.values() + ] + (self.run_dir / "runpod_pool.json").write_text(json.dumps(payload, indent=2, sort_keys=True) + "\n", encoding="utf-8") + + @staticmethod + def _pod_from_payload(payload: dict[str, object]) -> RunPodPod: + runtime = payload.get("runtime") + public_ip: str | None = None + ssh_port: int | None = None + + if isinstance(runtime, dict): + ports = runtime.get("ports") + if isinstance(ports, list): + for port in ports: + if not isinstance(port, dict): + continue + private = str(port.get("privatePort", "")) + protocol = str(port.get("type", port.get("protocol", ""))).lower() + if private == "22" and "tcp" in protocol: + ip_value = port.get("ip") + if ip_value is not None: + public_ip = str(ip_value) + public_port = port.get("publicPort") + if public_port is not None: + ssh_port = int(public_port) + break + if public_ip is None: + ip_value = runtime.get("publicIp") or runtime.get("ip") + if ip_value is not None: + public_ip = str(ip_value) + if ssh_port is None: + mappings = runtime.get("portMappings") + if isinstance(mappings, dict): + for key, value in mappings.items(): + if str(key) == "22" and value is not None: + ssh_port = int(value) + break + + return RunPodPod( + id=str(payload.get("id", "")), + name=str(payload.get("name", "")), + public_ip=public_ip, + ssh_port=ssh_port, + desired_status=str(payload.get("desiredStatus", payload.get("status", ""))), + machine_id=str(payload.get("machineId", "")) or None, + ) From 772819e03ec58250d9072af72178895fbd2fbd1a Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Wed, 11 Mar 2026 18:19:41 +1100 Subject: [PATCH 09/17] Harden rollout patching and Hyperbolic launch flow --- .gitignore | 2 + README.md | 236 ++++++--- configs/ttt_discover_autoresearch.yaml | 52 +- configs/ttt_discover_autoresearch_large.yaml | 48 +- configs/ttt_discover_autoresearch_medium.yaml | 48 +- configs/ttt_discover_autoresearch_small.yaml | 46 +- pyproject.toml | 5 +- tests/test_cli_integration.py | 57 ++ 
tests/test_env_smoke.py | 24 +- tests/test_hyperbolic.py | 158 ++++++ tests/test_prompt_builder.py | 19 +- tests/test_reward.py | 118 ++++- tests/test_runner.py | 129 ++++- ttt_autoresearch/cli.py | 77 ++- ttt_autoresearch/config.py | 55 +- ttt_autoresearch/discover_compat.py | 121 +++++ ttt_autoresearch/env.py | 28 +- ttt_autoresearch/hyperbolic.py | 491 ++++++++++++++++++ ttt_autoresearch/hyperbolic_mirror.py | 165 ++++++ ttt_autoresearch/prompt_builder.py | 63 ++- ttt_autoresearch/reward.py | 206 +++++++- ttt_autoresearch/runner.py | 453 +++++++++++++++- ttt_autoresearch/runpod.py | 13 +- uv.lock | 76 ++- 24 files changed, 2377 insertions(+), 313 deletions(-) create mode 100644 tests/test_hyperbolic.py create mode 100644 ttt_autoresearch/hyperbolic.py create mode 100644 ttt_autoresearch/hyperbolic_mirror.py diff --git a/.gitignore b/.gitignore index ffe156a4..e7838ca4 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,5 @@ dev/ results.tsv runs/ .pytest_cache/ +prompt.txt +response.txt diff --git a/README.md b/README.md index 844d6ed6..6d82015d 100644 --- a/README.md +++ b/README.md @@ -4,13 +4,13 @@ This repo is a focused fork of [karpathy/autoresearch](https://github.com/karpathy/autoresearch) that replaces the outer experiment loop with [TTT-Discover](https://github.com/test-time-training/discover). 
-The checked-in default is now a practical unattended setup: +The checked-in defaults now support two primary outer-loop modes: -- **Outer loop:** Tinker + `openai/gpt-oss-120b` -- **Renderer:** `gpt_oss_high_reasoning` -- **Inner loop:** RunPod `H100 PCIe` spot workers +- **Kimi mode:** Tinker + `moonshotai/Kimi-K2.5` with `qwen3` +- **GPT-OSS mode:** Tinker + `openai/gpt-oss-120b` with `gpt_oss_high_reasoning` +- **Inner loop:** one Hyperbolic on-demand node with `8x H100` - **Main preset:** `2 groups x 8 rollouts x 12 steps` -- **Spot failover:** if a worker pod is preempted, the current rollout is retried on a replacement pod automatically +- **Launch mode:** detached remote controller so the run survives your local machine disconnecting The core objective stays the same as the original AutoResearch repo: improve [`train.py`](train.py) to lower `val_bpb`. @@ -22,7 +22,7 @@ This project builds on: - [Learning to Discover at Test Time](https://arxiv.org/abs/2601.16175) - [test-time-training/discover](https://github.com/test-time-training/discover) -The RL recipe stays with upstream `discover`. This repo provides the AutoResearch-specific environment, reward, runner, RunPod execution backend, and practical launch workflow. +The RL recipe stays with upstream `discover`. This repo provides the AutoResearch-specific environment, reward, runner, Hyperbolic execution/launch backend, and practical launch workflow. ## How The System Works @@ -34,45 +34,73 @@ There are two loops: - The score is `val_bpb`, and lower is better. 2. **Outer loop** - - TTT-Discover samples full-file replacements for `train.py`. + - TTT-Discover samples strict SEARCH/REPLACE patches against the current working `train.py`. - Each candidate is evaluated by the inner loop. - Reward is a direct transformed task score: `1 / (1e-8 + val_bpb)`. - Failed or invalid candidates receive `0.0` reward. - Upstream `discover` updates the outer model online. 
-The checked-in workflow keeps the outer controller on a stable machine and uses RunPod spot instances only for the inner evaluations. That is what lets the run continue unattended if a spot worker disappears. +Before any H100 evaluation, every candidate now goes through a CPU-side preflight: + +- patch-only parsing +- AST / `py_compile` validation +- batch-divisibility checks +- final `val_bpb` summary preservation +- `forward(... reduction=...)` compatibility checks + +Only preflight-passing candidates reach the GPU evaluator. + +The checked-in workflow launches the entire controller onto the Hyperbolic node itself. After launch, the outer loop and all inner evaluations keep running on the remote machine even if your laptop sleeps, disconnects, or closes. ## What “Unattended” Means Here This repo is designed so that: -- the controller process running [`run_ttt_discover.py`](run_ttt_discover.py) stays alive on a stable machine -- inner evaluations are dispatched to RunPod spot pods -- if a pod is preempted during a rollout, the runner provisions a replacement pod -- the interrupted rollout is retried from scratch on the replacement pod +- you start the run once from your local machine +- the repo bootstraps a Hyperbolic `8x H100` node over SSH +- it uploads a remote config and launches `run_ttt_discover.py` under `nohup` on that machine +- the remote controller runs the outer loop and the inner `train.py` evaluations locally on the node’s 8 GPUs - the run continues until the configured `groups_per_step x samples_per_step x max_steps` budget is completed -Important boundary: +The important implication is that the remote node is now the source of truth for checkpoints and artifacts. Your local machine is only used to kick the run off. 
+ +The launcher also starts a local background mirror process by default: -- the controller process itself is **not** spot-resilient -- only the **inner worker pool** is spot-resilient +- while your laptop is online, it continuously pulls the full remote run directory back to `runs//mirror/` +- when the remote controller exits, it performs one final sync +- if your laptop is offline, the mirror pauses implicitly and the remote node remains the source of truth -So the outer process should run on your laptop, workstation, or another non-preemptible box. The H100 spot instances are only for the expensive inner `train.py` jobs. +## Primary Model Modes + +### Kimi K2.5 + +The default config at [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml) uses: + +- `model_name: moonshotai/Kimi-K2.5` +- `renderer_name: qwen3` +- `groups_per_step: 2` +- `samples_per_step: 8` +- `max_steps: 12` -## Current Default +### GPT-OSS 120B -The default config at [`configs/ttt_discover_autoresearch.yaml`](configs/ttt_discover_autoresearch.yaml) is the recommended medium run: +The shipped medium/large presets use: - `model_name: openai/gpt-oss-120b` - `renderer_name: gpt_oss_high_reasoning` + +### Shared Practical Defaults + +Both primary model modes use the same practical unattended search shape: + - `target_val_bpb: 0.85` -- `execution_backend: runpod` +- `execution_backend: hyperbolic` - `groups_per_step: 2` - `samples_per_step: 8` - `max_steps: 12` -- `max_concurrent_evaluations: 16` -- `runpod_gpu_type_ids: ["NVIDIA H100 PCIe"]` -- `runpod_interruptible: true` +- `max_concurrent_evaluations: 8` +- `gpu_devices: ["0", "1", "2", "3", "4", "5", "6", "7"]` +- `hyperbolic_detached_controller: true` That means: @@ -81,6 +109,7 @@ That means: - `192` rollout evaluations total - `1` extra baseline run before RL starts - `193` total inner jobs +- evaluations run in two waves per outer step on a single `8x H100` node ## Presets @@ -92,7 +121,7 @@ File: 
[`configs/ttt_discover_autoresearch_small.yaml`](configs/ttt_discover_auto - `2 x 4 x 12` - `96` RL rollouts -- `8` concurrent RunPod workers +- `8` concurrent GPU slots on the Hyperbolic node ### Medium @@ -100,7 +129,7 @@ File: [`configs/ttt_discover_autoresearch_medium.yaml`](configs/ttt_discover_aut - `2 x 8 x 12` - `192` RL rollouts -- `16` concurrent RunPod workers +- `8` concurrent GPU slots on the Hyperbolic node This is the recommended main mode and matches the default config. @@ -110,53 +139,66 @@ File: [`configs/ttt_discover_autoresearch_large.yaml`](configs/ttt_discover_auto - `2 x 8 x 20` - `320` RL rollouts -- `16` concurrent RunPod workers +- `8` concurrent GPU slots on the Hyperbolic node Use this only after the medium run is stable. -## RunPod Backend +## Hyperbolic Backend The inner-loop executor now supports two backends: - `local` -- `runpod` +- `hyperbolic` -The `runpod` backend does the following: +The `hyperbolic` backend does two different things depending on where the controller is running: -1. Creates up to `max_concurrent_evaluations` spot pods. -2. Waits for SSH on each pod. -3. Bootstraps the pod by: - - uploading the repo snapshot - - installing `uv` - - running `uv sync` - - running `uv run prepare.py --num-shards 10` -4. Uploads each candidate workspace to a worker pod. -5. Runs the inner command remotely. -6. Pulls back `stdout.log`, `stderr.log`, and metrics. -7. Deletes the pods automatically when the run finishes. +1. **Detached controller launch** + - connects to your Hyperbolic node over SSH + - uploads the repo snapshot + - runs `uv sync` + - runs `uv run prepare.py --num-shards 10` + - writes a remote config with `execution_backend: local` + - starts the full TTT controller under `nohup` -If a pod disappears during upload, bootstrap, or execution, the worker is retired, a replacement is created, and the interrupted rollout is retried. +2. 
**Inner evaluations on the remote node** + - the remote controller pins rollouts to `CUDA_VISIBLE_DEVICES=0..7` + - each rollout runs in an isolated workspace + - logs, metrics, and manifests are saved under the remote `run_dir` ## Prerequisites You need: -- Linux or macOS for the controller machine +- Linux or macOS for the launch machine - Python 3.11+ - [uv](https://docs.astral.sh/uv/) - a Tinker-enabled account for the outer loop -- a RunPod account with: - - API access - - an SSH public key registered in the account - - access to H100 spot instances +- a Hyperbolic account with: + - one running on-demand `8x H100` node + - an SSH public key registered on the account + - the node’s SSH host / IP + - enough credits to keep the machine alive for the full run Environment: ```bash -export RUNPOD_API_KEY=... +export OPENAI_API_KEY=... ``` -You also need whatever Tinker credentials your local `ttt-discover` installation expects. +Or equivalently: + +```bash +export TINKER_API_KEY=... +export OPENAI_API_KEY="$TINKER_API_KEY" +``` + +Optional for Kimi runs on fresh nodes: + +```bash +export HF_TOKEN=... +``` + +`HF_TOKEN` is not required for correctness. It only reduces Hugging Face rate-limit and cold-start friction when the Kimi tokenizer/custom code is downloaded on a fresh machine. ## Quick Start @@ -173,6 +215,16 @@ Or explicitly choose the medium preset: uv run python run_ttt_discover.py --config configs/ttt_discover_autoresearch_medium.yaml ``` +When the launch succeeds, the script prints the remote run directory and remote controller log path, and it writes the same metadata to `hyperbolic_launch.json` in the local launch directory. + +The launcher also refuses to start if another detached AutoResearch controller or `train.py` process is already active on the same Hyperbolic node. This prevents overlapping runs from silently OOMing each other. 
+ +It also starts a local mirror process and records: + +- `local_mirror_dir` +- `local_mirror_log_path` +- `local_mirror_pid` + ## Cost And Runtime Shape For this repo, the expensive part is the inner loop. Each rollout is a real five-minute AutoResearch training job. @@ -190,63 +242,68 @@ That means the default medium run has: ### Example Medium Budget -Using your current spot numbers: +With a single `8x H100` node, the total GPU-hours are the same `17.47`, but they are spread across the 8 GPUs on that machine. + +If your node price is `P` dollars per GPU-hour, the GPU line item is approximately: -- `H100 PCIe spot: $1.25/hr` -- `H100 SXM spot: $1.75/hr` +- `17.47 x P` -The medium run works out to: +If Hyperbolic bills a full `8x H100` node at a flat hourly rate `N`, the same run costs approximately: -- `H100 PCIe`: about `$21.84` -- `H100 SXM`: about `$30.57` +- `(2.5 to 3.5 hours) x N` Tinker is the smaller cost bucket here. The exact amount depends on current Tinker pricing and token usage, but for this repo it is materially smaller than the H100 rental line item. ### Wall Clock -Total GPU-hours are roughly fixed, so more pods mostly reduce elapsed time, not total spend. 
+Approximate medium-run wall clock on one `8x H100` node: -Approximate medium-run wall clock: - -- `1 H100`: about `18-20h` -- `8 H100s`: about `2.5-3h` -- `16 H100s`: about `1.3-1.8h` +- about `2.5-3.5h` ## Model And Renderer -The checked-in default is: +One first-class outer-loop mode is: + +```yaml +model_name: moonshotai/Kimi-K2.5 +renderer_name: qwen3 +``` + +The other first-class outer-loop mode is: ```yaml model_name: openai/gpt-oss-120b renderer_name: gpt_oss_high_reasoning ``` -This is intentional: +This dual support is intentional: -- it matches the strongest paper-aligned model family more closely than the older Qwen default -- it is already supported by the renderer mapping in [`ttt_autoresearch/config.py`](ttt_autoresearch/config.py) -- it is the intended outer-loop model for the default RunPod workflow +- Kimi K2.5 and GPT-OSS-120B are both treated as primary outer-loop modes +- both are explicitly supported by the renderer mapping and tokenizer compatibility patches +- both use the same strict patch-only rollout/evaluation pipeline Other models still work, but if the model family is not recognized automatically you must set `renderer_name` explicitly. 
## Important Config Knobs -The main knobs for unattended RunPod execution are: +The main knobs for unattended Hyperbolic execution are: - `execution_backend` - - use `runpod` for remote spot workers + - use `hyperbolic` to launch a detached remote controller - use `local` for direct local GPU execution - `max_concurrent_evaluations` - - number of worker pods for `runpod` + - number of simultaneous inner evaluations on the remote node - number of local simultaneous inner runs for `local` -- `runpod_gpu_type_ids` - - default is `["NVIDIA H100 PCIe"]` -- `runpod_interruptible` - - leave this `true` for spot behavior -- `runpod_bootstrap_commands` - - optional override if you want to use a custom image or template -- `runpod_retry_limit` - - how many times to reprovision and retry an interrupted rollout before surfacing a failure +- `hyperbolic_ssh_host` + - SSH host or IP for your Hyperbolic node +- `hyperbolic_ssh_user` + - defaults to `ubuntu` +- `hyperbolic_ssh_private_key_path` + - optional explicit SSH private key path +- `hyperbolic_detached_controller` + - default `true`; launches the whole controller remotely under `nohup` +- `gpu_devices` + - defaults to all eight GPUs on the remote node ## Fixed Prompt Target @@ -271,7 +328,7 @@ prepare.py Fixed data prep and runtime utilities train.py Inner training program edited by the outer model program.md Human-authored research instructions/context run_ttt_discover.py Main TTT-Discover entrypoint -ttt_autoresearch/ Adapter layer for environment, reward, runner, RunPod, config +ttt_autoresearch/ Adapter layer for environment, reward, runner, Hyperbolic, config configs/ Practical preset YAML configs tests/ Smoke and unit coverage for the adapter ``` @@ -287,9 +344,9 @@ Each run writes artifacts under `runs//`: - `best/metrics.json` - `candidates/` - `discover_log/` -- `runpod_pool.json` +- `hyperbolic_launch.json` -`runpod_pool.json` records the worker pod metadata for the current run so you can inspect what was 
provisioned. +`hyperbolic_launch.json` records the remote launch metadata for the detached Hyperbolic controller, including the remote run directory and remote log path. The important resume/checkpoint files are: @@ -313,11 +370,19 @@ The important resume/checkpoint files are: - parsed metrics sidecar for that rollout - `candidates/_/rollout_manifest.json` - self-contained rollout record with the starting state, candidate payload, evaluation result, reward, and promotion outcome +- `candidates/_/prompt.txt` + - exact prompt sent to the outer model for that rollout +- `candidates/_/response.txt` + - raw model response for that rollout - invalid or malformed model outputs are also persisted under `candidates/` with a `rollout_manifest.json`, `metrics.json`, and raw `response.txt` - `discover_log/checkpoints.jsonl` - upstream TTT-Discover checkpoint index - `discover_log/` - LoRA/training state and sampler checkpoints used for resume +- `hyperbolic_launch.json` + - local launch metadata, including the remote run dir and local mirror info +- `mirror/` + - best-effort local mirror of the remote run directory while your laptop is reachable ## Resuming A Stopped Run @@ -357,7 +422,7 @@ Important resume rule: ## Local Mode Still Exists -If you want to run without RunPod, set: +If you want to run without Hyperbolic, set: ```yaml execution_backend: local @@ -374,17 +439,18 @@ What is covered in tests: - candidate parsing - CLI wiring into upstream `discover` - local concurrency gating -- RunPod retry logic for interrupted workers -- runner cleanup behavior +- Hyperbolic detached launch wiring +- Hyperbolic runner/backend cleanup behavior +- malformed candidate persistence and rollout manifests What is still operationally environment-dependent: -- real RunPod API credentials -- SSH access from the controller to the worker pods +- SSH access from your launch machine to the Hyperbolic node +- a working Hyperbolic `8x H100` node with enough disk and credits - real Tinker 
credentials and provider setup -- long-run stability on your specific account and spot market +- long-run stability on your specific Hyperbolic account and node -So the repo is structurally ready for unattended Tinker + RunPod operation, but the final production proof is still a real run on your account. +So the repo is structurally ready for unattended Tinker + Hyperbolic operation, but the final production proof is still a real run on your account. ## License diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index bae46b7e..862825f8 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,4 +1,4 @@ -model_name: openai/gpt-oss-120b +model_name: moonshotai/Kimi-K2.5 provider: null api_base: null target_val_bpb: 0.85 @@ -12,41 +12,33 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover -renderer_name: gpt_oss_high_reasoning +renderer_name: qwen3 learning_rate: 0.00004 lora_rank: 32 kl_penalty_coef: 0.1 phase1_max_tokens: 26000 save_every: 2 -wandb_project: autoresearch-ttt-discover +wandb_project: null num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 16 -gpu_devices: null -execution_backend: runpod -runpod_api_key_env: RUNPOD_API_KEY -runpod_api_base: https://rest.runpod.io/v1 -runpod_cloud_type: COMMUNITY -runpod_interruptible: true -runpod_gpu_type_ids: - - NVIDIA H100 PCIe -runpod_template_id: null -runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 -runpod_name_prefix: autoresearch-ttt -runpod_support_public_ip: true -runpod_ports: - - 22/tcp -runpod_container_disk_gb: 50 -runpod_volume_gb: 0 -runpod_volume_mount_path: /workspace -runpod_ssh_user: root -runpod_ssh_private_key_path: null -runpod_repo_root: /workspace/autoresearch -runpod_prepare_num_shards: 10 -runpod_bootstrap_timeout_sec: 7200 
-runpod_retry_limit: 3 -runpod_poll_interval_sec: 5 -runpod_bootstrap_commands: null -runpod_terminate_on_close: true +max_concurrent_evaluations: 8 +gpu_devices: + - "0" + - "1" + - "2" + - "3" + - "4" + - "5" + - "6" + - "7" +execution_backend: hyperbolic +hyperbolic_ssh_host: 85.234.79.188 +hyperbolic_ssh_port: 22 +hyperbolic_ssh_user: ubuntu +hyperbolic_ssh_private_key_path: null +hyperbolic_repo_root: /home/ubuntu/autoresearch +hyperbolic_prepare_num_shards: 10 +hyperbolic_bootstrap_timeout_sec: 7200 +hyperbolic_bootstrap_commands: null diff --git a/configs/ttt_discover_autoresearch_large.yaml b/configs/ttt_discover_autoresearch_large.yaml index a63d4310..793814a8 100644 --- a/configs/ttt_discover_autoresearch_large.yaml +++ b/configs/ttt_discover_autoresearch_large.yaml @@ -18,35 +18,27 @@ lora_rank: 32 kl_penalty_coef: 0.1 phase1_max_tokens: 26000 save_every: 2 -wandb_project: autoresearch-ttt-discover +wandb_project: null num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 16 -gpu_devices: null -execution_backend: runpod -runpod_api_key_env: RUNPOD_API_KEY -runpod_api_base: https://rest.runpod.io/v1 -runpod_cloud_type: COMMUNITY -runpod_interruptible: true -runpod_gpu_type_ids: - - NVIDIA H100 PCIe -runpod_template_id: null -runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 -runpod_name_prefix: autoresearch-ttt -runpod_support_public_ip: true -runpod_ports: - - 22/tcp -runpod_container_disk_gb: 50 -runpod_volume_gb: 0 -runpod_volume_mount_path: /workspace -runpod_ssh_user: root -runpod_ssh_private_key_path: null -runpod_repo_root: /workspace/autoresearch -runpod_prepare_num_shards: 10 -runpod_bootstrap_timeout_sec: 7200 -runpod_retry_limit: 3 -runpod_poll_interval_sec: 5 -runpod_bootstrap_commands: null -runpod_terminate_on_close: true +max_concurrent_evaluations: 8 +gpu_devices: + - "0" + - "1" + - "2" + - "3" + - "4" + - "5" + - "6" + - "7" +execution_backend: 
hyperbolic +hyperbolic_ssh_host: null +hyperbolic_ssh_port: 22 +hyperbolic_ssh_user: ubuntu +hyperbolic_ssh_private_key_path: null +hyperbolic_repo_root: /home/ubuntu/autoresearch +hyperbolic_prepare_num_shards: 10 +hyperbolic_bootstrap_timeout_sec: 7200 +hyperbolic_bootstrap_commands: null diff --git a/configs/ttt_discover_autoresearch_medium.yaml b/configs/ttt_discover_autoresearch_medium.yaml index d46b6a9f..05797b51 100644 --- a/configs/ttt_discover_autoresearch_medium.yaml +++ b/configs/ttt_discover_autoresearch_medium.yaml @@ -18,35 +18,27 @@ lora_rank: 32 kl_penalty_coef: 0.1 phase1_max_tokens: 26000 save_every: 2 -wandb_project: autoresearch-ttt-discover +wandb_project: null num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 -max_concurrent_evaluations: 16 -gpu_devices: null -execution_backend: runpod -runpod_api_key_env: RUNPOD_API_KEY -runpod_api_base: https://rest.runpod.io/v1 -runpod_cloud_type: COMMUNITY -runpod_interruptible: true -runpod_gpu_type_ids: - - NVIDIA H100 PCIe -runpod_template_id: null -runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 -runpod_name_prefix: autoresearch-ttt -runpod_support_public_ip: true -runpod_ports: - - 22/tcp -runpod_container_disk_gb: 50 -runpod_volume_gb: 0 -runpod_volume_mount_path: /workspace -runpod_ssh_user: root -runpod_ssh_private_key_path: null -runpod_repo_root: /workspace/autoresearch -runpod_prepare_num_shards: 10 -runpod_bootstrap_timeout_sec: 7200 -runpod_retry_limit: 3 -runpod_poll_interval_sec: 5 -runpod_bootstrap_commands: null -runpod_terminate_on_close: true +max_concurrent_evaluations: 8 +gpu_devices: + - "0" + - "1" + - "2" + - "3" + - "4" + - "5" + - "6" + - "7" +execution_backend: hyperbolic +hyperbolic_ssh_host: null +hyperbolic_ssh_port: 22 +hyperbolic_ssh_user: ubuntu +hyperbolic_ssh_private_key_path: null +hyperbolic_repo_root: /home/ubuntu/autoresearch +hyperbolic_prepare_num_shards: 10 +hyperbolic_bootstrap_timeout_sec: 7200 
+hyperbolic_bootstrap_commands: null diff --git a/configs/ttt_discover_autoresearch_small.yaml b/configs/ttt_discover_autoresearch_small.yaml index e6d58df1..00c75c01 100644 --- a/configs/ttt_discover_autoresearch_small.yaml +++ b/configs/ttt_discover_autoresearch_small.yaml @@ -18,35 +18,27 @@ lora_rank: 32 kl_penalty_coef: 0.1 phase1_max_tokens: 26000 save_every: 2 -wandb_project: autoresearch-ttt-discover +wandb_project: null num_cpus_per_task: 0 eval_timeout: 2700 local_model_path: null keep_history: 6 max_concurrent_evaluations: 8 -gpu_devices: null -execution_backend: runpod -runpod_api_key_env: RUNPOD_API_KEY -runpod_api_base: https://rest.runpod.io/v1 -runpod_cloud_type: COMMUNITY -runpod_interruptible: true -runpod_gpu_type_ids: - - NVIDIA H100 PCIe -runpod_template_id: null -runpod_image_name: runpod/pytorch:2.8.0-py3.11-cuda12.8.1-cudnn-devel-ubuntu22.04 -runpod_name_prefix: autoresearch-ttt -runpod_support_public_ip: true -runpod_ports: - - 22/tcp -runpod_container_disk_gb: 50 -runpod_volume_gb: 0 -runpod_volume_mount_path: /workspace -runpod_ssh_user: root -runpod_ssh_private_key_path: null -runpod_repo_root: /workspace/autoresearch -runpod_prepare_num_shards: 10 -runpod_bootstrap_timeout_sec: 7200 -runpod_retry_limit: 3 -runpod_poll_interval_sec: 5 -runpod_bootstrap_commands: null -runpod_terminate_on_close: true +gpu_devices: + - "0" + - "1" + - "2" + - "3" + - "4" + - "5" + - "6" + - "7" +execution_backend: hyperbolic +hyperbolic_ssh_host: null +hyperbolic_ssh_port: 22 +hyperbolic_ssh_user: ubuntu +hyperbolic_ssh_private_key_path: null +hyperbolic_repo_root: /home/ubuntu/autoresearch +hyperbolic_prepare_num_shards: 10 +hyperbolic_bootstrap_timeout_sec: 7200 +hyperbolic_bootstrap_commands: null diff --git a/pyproject.toml b/pyproject.toml index 66d6b53f..9db40c34 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,12 +14,13 @@ dependencies = [ "rustbpe>=0.1.0", "tiktoken>=0.11.0", "ttt-discover @ 
git+https://github.com/test-time-training/discover@5df1a0ee9b04272ca33de0101ae64dd499e63f29", - "torch==2.9.1", + "torch==2.9.1; sys_platform == 'darwin'", + "torch==2.9.1; sys_platform != 'darwin'", ] [tool.uv.sources] torch = [ - { index = "pytorch-cu128" }, + { index = "pytorch-cu128", marker = "sys_platform != 'darwin'" }, ] [[tool.uv.index]] diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py index 07fa49f4..b8c6e049 100644 --- a/tests/test_cli_integration.py +++ b/tests/test_cli_integration.py @@ -11,6 +11,63 @@ class CliIntegrationTests(unittest.TestCase): + def test_cli_launches_detached_hyperbolic_controller(self) -> None: + captured: dict[str, object] = {} + + class FakeLauncher: + def __init__(self, repo_root: Path, run_dir: Path, config) -> None: + captured["repo_root"] = repo_root + captured["run_dir"] = run_dir + captured["config"] = config + + def launch_detached_controller(self) -> dict[str, str]: + captured["launched"] = True + return { + "remote_run_dir": "/home/ubuntu/autoresearch/runs/demo", + "remote_config_path": "/home/ubuntu/autoresearch/runs/launches/demo/remote_config.yaml", + "remote_log_path": "/home/ubuntu/autoresearch/runs/launches/demo/controller.log", + "remote_pid_path": "/home/ubuntu/autoresearch/runs/launches/demo/controller.pid", + "remote_exitcode_path": "/home/ubuntu/autoresearch/runs/launches/demo/controller.exitcode", + "remote_launch_dir": "/home/ubuntu/autoresearch/runs/launches/demo", + } + + original_launcher = cli.HyperbolicPool + original_mirror = cli._start_hyperbolic_mirror + cli.HyperbolicPool = FakeLauncher # type: ignore[assignment] + cli._start_hyperbolic_mirror = lambda config, run_dir, launch_info: { # type: ignore[assignment] + "local_mirror_dir": str(run_dir / "mirror"), + "local_mirror_log_path": str(run_dir / "hyperbolic_mirror.log"), + "local_mirror_pid": "12345", + } + try: + with tempfile.TemporaryDirectory() as tmpdir: + tmp_path = Path(tmpdir) + run_dir = tmp_path / "runs" / 
"hyperbolic-demo" + config_path = tmp_path / "config.yaml" + config_path.write_text( + "\n".join( + [ + "model_name: openai/gpt-oss-120b", + "renderer_name: gpt_oss_high_reasoning", + "execution_backend: hyperbolic", + "hyperbolic_ssh_host: 1.2.3.4", + f"run_dir: {run_dir}", + ] + ) + + "\n", + encoding="utf-8", + ) + exit_code = cli.main(["--config", str(config_path)]) + self.assertEqual(exit_code, 0) + self.assertTrue(captured.get("launched")) + self.assertTrue((run_dir / "hyperbolic_launch.json").exists()) + self.assertTrue((run_dir / "resolved_config.json").exists()) + launch = (run_dir / "hyperbolic_launch.json").read_text(encoding="utf-8") + self.assertIn("local_mirror_dir", launch) + finally: + cli.HyperbolicPool = original_launcher # type: ignore[assignment] + cli._start_hyperbolic_mirror = original_mirror # type: ignore[assignment] + def test_resolve_config_path_falls_back_to_repo_root(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: repo_root = Path(tmpdir) diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 1cd7252a..7aed1a6e 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -11,6 +11,20 @@ from ttt_autoresearch.reward import AutoResearchRewardEvaluator from ttt_autoresearch.runner import AutoResearchRunner +MINIMAL_VALID_TRAIN_PY = """from prepare import MAX_SEQ_LEN +TOTAL_BATCH_SIZE = 2048 +DEVICE_BATCH_SIZE = 1 +tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN +assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0 + +class GPT: + def forward(self, idx, targets=None, reduction='mean'): + return 0 + +# val_bpb: 1.100000 +print(f"val_bpb: {1.1:.6f}") +""" + class EnvSmokeTests(unittest.TestCase): def test_state_prompt_shows_before_after_without_construction(self) -> None: @@ -34,7 +48,7 @@ def test_env_prompt_and_reward_flow(self) -> None: root = Path(tmpdir) (root / "program.md").write_text("Focus on val_bpb.", encoding="utf-8") (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") - (root 
/ "train.py").write_text("# val_bpb: 1.100000\n", encoding="utf-8") + (root / "train.py").write_text(MINIMAL_VALID_TRAIN_PY, encoding="utf-8") fixtures = root / "tests" / "fixtures" fixtures.mkdir(parents=True) fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" @@ -76,11 +90,13 @@ def test_env_prompt_and_reward_flow(self) -> None: self.assertNotIn("LOOP FOREVER", prompt) self.assertNotIn("results.tsv", prompt) self.assertNotIn("git reset", prompt) - self.assertTrue(env.check_format('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}')) + self.assertIn("TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0", prompt) + payload = "<<<<<<< SEARCH\n# val_bpb: 1.100000\n=======\n# val_bpb: 0.900000\n>>>>>>> REPLACE" + self.assertTrue(env.check_format(payload)) - verify = asyncio.run(env.check_answer('{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}', 0)) + verify = asyncio.run(env.check_answer(payload, 0)) self.assertGreater(verify.reward, 0.0) - next_state = env._create_next_state(0, '{"summary":"s","rationale":"r","train_py":"# val_bpb: 0.900000\\n"}', verify) + next_state = env._create_next_state(0, payload, verify) self.assertAlmostEqual(next_state.current_best_val_bpb, 0.9) diff --git a/tests/test_hyperbolic.py b/tests/test_hyperbolic.py new file mode 100644 index 00000000..c84ec9d9 --- /dev/null +++ b/tests/test_hyperbolic.py @@ -0,0 +1,158 @@ +from __future__ import annotations + +from pathlib import Path +import subprocess +import tempfile +import unittest +from unittest import mock + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.discover_compat import patch_ttt_discover_no_wandb_bug +from ttt_autoresearch.hyperbolic import HyperbolicError, HyperbolicPool, RemoteExecutionResult +from ttt_autoresearch.runner import AutoResearchRunner + + +class HyperbolicPoolTests(unittest.TestCase): + def test_validate_requires_host(self) -> None: + config = 
TTTAutoResearchConfig(execution_backend="hyperbolic", hyperbolic_ssh_host=None).normalized(Path(".")) + pool = object.__new__(HyperbolicPool) + pool.config = config + with self.assertRaises(HyperbolicError): + pool._validate_config() + + def test_validate_ssh_key_rejects_missing_key(self) -> None: + config = TTTAutoResearchConfig( + execution_backend="hyperbolic", + hyperbolic_ssh_host="1.2.3.4", + hyperbolic_ssh_private_key_path="/nonexistent/path/to/key", + ).normalized(Path(".")) + pool = object.__new__(HyperbolicPool) + pool.config = config + with self.assertRaises(HyperbolicError): + pool._validate_ssh_key() + + def test_runner_close_shuts_down_hyperbolic_pool(self) -> None: + class FakePool: + def __init__(self) -> None: + self.closed = False + + def close(self) -> None: + self.closed = True + + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + config = TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + pool = FakePool() + runner._hyperbolic_pool = pool # type: ignore[assignment] + runner.close() + self.assertTrue(pool.closed) + + def test_runner_uses_hyperbolic_backend(self) -> None: + class FakePool: + def __init__(self) -> None: + self.last_env = None + + def execute_workspace(self, workspace: Path, command: list[str], env: dict[str, str], timeout_sec: int, label: str) -> RemoteExecutionResult: + self.last_env = dict(env) + return RemoteExecutionResult(stdout="val_bpb: 0.900000\n", stderr="", returncode=0, elapsed_sec=1.0) + + def close(self) -> None: + return None + + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("print('ok')\n", encoding="utf-8") + config = TTTAutoResearchConfig( + execution_backend="hyperbolic", + hyperbolic_ssh_host="1.2.3.4", + 
gpu_devices=["0", "1"], + baseline_command_override=["python3", "train.py"], + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + fake_pool = FakePool() + runner._hyperbolic_pool = fake_pool # type: ignore[assignment] + bootstrap = runner.build_bootstrap(1.0) + result = runner.run_baseline(bootstrap=bootstrap) + self.assertEqual(result.status, "success") + self.assertAlmostEqual(result.val_bpb, 0.9) + + def test_detached_launch_aliases_openai_key_to_tinker_key(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + run_dir = root / "runs" / "demo" + run_dir.mkdir(parents=True) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("print('ok')\n", encoding="utf-8") + config = TTTAutoResearchConfig( + execution_backend="hyperbolic", + hyperbolic_ssh_host="1.2.3.4", + run_dir=str(run_dir), + ).normalized(root) + pool = object.__new__(HyperbolicPool) + pool.repo_root = root + pool.run_dir = run_dir + pool.config = config + pool.repo_archive_path = run_dir / "bundle.tar.gz" + pool.repo_archive_lock = None + pool.bootstrap_lock = None + pool.bootstrap_complete = True + + uploaded_scripts: list[str] = [] + + def fake_run_ssh(command: str, timeout: int, check: bool): + return RemoteExecutionResult(stdout="", stderr="", returncode=0, elapsed_sec=0.0) + + def fake_upload(local_path: Path, remote_path: str): + if remote_path.endswith("start_controller.sh"): + uploaded_scripts.append(Path(local_path).read_text(encoding="utf-8")) + + pool._ensure_node_ready = lambda: None # type: ignore[method-assign] + pool._run_ssh = fake_run_ssh # type: ignore[method-assign] + pool._upload_file = fake_upload # type: ignore[method-assign] + pool._build_remote_controller_config = lambda remote_run_dir: {"run_dir": remote_run_dir} # type: ignore[method-assign] + + with mock.patch.dict("os.environ", 
{"OPENAI_API_KEY": "abc123"}, clear=False): + pool.launch_detached_controller() + + self.assertEqual(len(uploaded_scripts), 1) + self.assertIn("export OPENAI_API_KEY=abc123", uploaded_scripts[0]) + self.assertIn("export TINKER_API_KEY=abc123", uploaded_scripts[0]) + + def test_no_wandb_patch_pads_logger_list(self) -> None: + try: + from ttt_discover.tinker_utils import ml_log + except ImportError: + self.skipTest("ttt_discover not installed in local environment") + patch_ttt_discover_no_wandb_bug() + logger = ml_log.setup_logging(log_dir=tempfile.mkdtemp(), wandb_project="demo", wandb_name="demo", config=None) + self.assertGreaterEqual(len(logger.loggers), 3) + + def test_detached_launch_refuses_active_remote_runs(self) -> None: + config = TTTAutoResearchConfig( + execution_backend="hyperbolic", + hyperbolic_ssh_host="1.2.3.4", + ).normalized(Path(".")) + pool = object.__new__(HyperbolicPool) + pool.config = config + + def fake_run_ssh(command: str, timeout: int, check: bool): + if "pgrep -af" in command: + return subprocess.CompletedProcess( + args=["ssh"], + returncode=12, + stdout="Detected active AutoResearch processes already running on the Hyperbolic node.\nControllers:\n123 python run_ttt_discover.py", + stderr="", + ) + raise AssertionError("unexpected remote command") + + pool._run_ssh = fake_run_ssh # type: ignore[method-assign] + with self.assertRaises(HyperbolicError): + pool._assert_no_active_remote_runs() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_prompt_builder.py b/tests/test_prompt_builder.py index 3ec5b64c..13fb2381 100644 --- a/tests/test_prompt_builder.py +++ b/tests/test_prompt_builder.py @@ -12,12 +12,15 @@ def test_prompt_is_single_rollout_specific(self) -> None: construction_section=( "You may want to start your search from the current training script shown above.\n" "This is the current starting point selected by the search procedure.\n" + "Make one focused experimental change at a time and preserve a working 
script.\n" "You are encouraged to explore meaningfully different directions if the current approach appears saturated." ), code_section=( "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" + "Prefer small, local hill-climbing edits over broad rewrites.\n" "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" + "Do not refactor unrelated code.\n" "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." ), ) @@ -29,13 +32,27 @@ def test_prompt_is_single_rollout_specific(self) -> None: self.assertIn("You are iteratively optimizing val_bpb.", prompt) self.assertIn("You may want to start your search from the current training script shown above.", prompt) self.assertIn("This is the current starting point selected by the search procedure.", prompt) + self.assertIn("Make one focused experimental change at a time and preserve a working script.", prompt) self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) + self.assertIn("Prefer small, local hill-climbing edits over broad rewrites.", prompt) self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) + self.assertIn("Do not refactor unrelated code.", prompt) self.assertIn("Maximum sequence length is `2048`", prompt) self.assertIn("Validation uses the pinned shard `06542`", prompt) self.assertIn("vocab size `8192`", prompt) self.assertIn("forward(x, y, reduction='none')", prompt) - self.assertIn("Return exactly one ```json``` block", prompt) + self.assertIn("TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0", prompt) + self.assertIn("Preserve the final summary prints", prompt) + self.assertIn("Return only one or more exact SEARCH/REPLACE edit blocks for 
`train.py`", prompt) + self.assertIn("Prefer 1-3 small patch blocks", prompt) + self.assertIn("Treat each SEARCH block like an exact `old_string` tool argument", prompt) + self.assertIn("Do not return standalone code fragments", prompt) + self.assertIn("Do not wrap the answer in JSON", prompt) + self.assertIn("Do not wrap the answer in markdown code fences", prompt) + self.assertIn("Do not abbreviate with `...` or placeholders", prompt) + self.assertIn("## Example Response", prompt) + self.assertIn("<<<<<<< SEARCH", prompt) + self.assertIn(">>>>>>> REPLACE", prompt) self.assertNotIn("Baseline val_bpb from the original script", prompt) self.assertNotIn("LOOP FOREVER", prompt) self.assertNotIn("results.tsv", prompt) diff --git a/tests/test_reward.py b/tests/test_reward.py index 1a817425..f8442849 100644 --- a/tests/test_reward.py +++ b/tests/test_reward.py @@ -11,7 +11,21 @@ from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig from ttt_autoresearch.env import AutoResearchState from ttt_autoresearch.reward import AutoResearchRewardEvaluator, reward_for_result -from ttt_autoresearch.runner import AutoResearchRunner, RunResult +from ttt_autoresearch.runner import AutoResearchRunner, RunResult, parse_patch_candidate_for_state + +MINIMAL_VALID_TRAIN_PY = """from prepare import MAX_SEQ_LEN +TOTAL_BATCH_SIZE = 2048 +DEVICE_BATCH_SIZE = 1 +tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN +assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0 + +class GPT: + def forward(self, idx, targets=None, reduction='mean'): + return 0 + +# val_bpb: 1.000000 +print(f"val_bpb: {1.0:.6f}") +""" class RewardTests(unittest.TestCase): @@ -66,7 +80,7 @@ def test_evaluator_uses_inner_metric_as_reward(self) -> None: root = Path(tmpdir) (root / "program.md").write_text("program", encoding="utf-8") (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") - (root / "train.py").write_text("# val_bpb: 1.000000\n", encoding="utf-8") + (root / 
"train.py").write_text(MINIMAL_VALID_TRAIN_PY, encoding="utf-8") fixtures = root / "tests" / "fixtures" fixtures.mkdir(parents=True) fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" @@ -91,21 +105,24 @@ def test_evaluator_uses_inner_metric_as_reward(self) -> None: baseline_val_bpb=1.0, current_best_val_bpb=1.0, ) - payload = '{"summary":"improve","rationale":"lower loss","train_py":"# val_bpb: 0.900000\\n"}' + payload = "<<<<<<< SEARCH\n# val_bpb: 1.000000\n=======\n# val_bpb: 0.900000\n>>>>>>> REPLACE" result = evaluator.get_reward(payload, state) self.assertGreater(result["reward"], 0.0) self.assertEqual(result["correctness"], 1.0) manifest = json.loads((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json").__next__().read_text(encoding="utf-8")) self.assertEqual(manifest["starting_state"]["timestep"], -1) - self.assertEqual(manifest["candidate"]["summary"], "improve") + self.assertEqual(manifest["candidate"]["summary"], "search_replace_patch_candidate") self.assertEqual(manifest["evaluation"]["status"], "success") + self.assertIn("Problem", manifest["prompt"]) + self.assertTrue((Path(manifest["prompt_path"])).exists()) + self.assertTrue((Path(manifest["raw_response_path"])).exists()) def test_invalid_candidate_is_persisted_to_history_and_manifest(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) (root / "program.md").write_text("program", encoding="utf-8") (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") - (root / "train.py").write_text("# val_bpb: 1.000000\n", encoding="utf-8") + (root / "train.py").write_text(MINIMAL_VALID_TRAIN_PY, encoding="utf-8") config = TTTAutoResearchConfig( execution_backend="local", @@ -125,7 +142,7 @@ def test_invalid_candidate_is_persisted_to_history_and_manifest(self) -> None: current_best_val_bpb=1.0, ) - result = evaluator.get_reward("not-json", state) + result = evaluator.get_reward("", state) 
self.assertEqual(result["metrics"]["candidate_status"], "invalid_candidate") history_path = Path(config.run_dir) / "history.jsonl" @@ -136,7 +153,70 @@ def test_invalid_candidate_is_persisted_to_history_and_manifest(self) -> None: manifest_path = next((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json")) manifest = json.loads(manifest_path.read_text(encoding="utf-8")) self.assertEqual(manifest["evaluation"]["status"], "invalid_candidate") - self.assertEqual(manifest["raw_response"], "not-json") + self.assertEqual(manifest["raw_response"], "") + self.assertIn("Problem", manifest["prompt"]) + + def test_parse_patch_candidate_rejects_raw_code(self) -> None: + with self.assertRaises(ValueError): + parse_patch_candidate_for_state("print(1)\n", "print(0)\n") + + def test_parse_patch_candidate_accepts_search_replace_patch(self) -> None: + candidate = parse_patch_candidate_for_state( + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE", + "print(0)\n", + ) + self.assertEqual(candidate.summary, "search_replace_patch_candidate") + self.assertEqual(candidate.train_py, "print(1)\n") + + def test_preflight_failed_candidate_is_persisted_to_history_and_manifest(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + original = ( + "from prepare import MAX_SEQ_LEN\n" + "TOTAL_BATCH_SIZE = 8\n" + "DEVICE_BATCH_SIZE = 1\n" + "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" + "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" + "class GPT:\n" + " def forward(self, idx, targets=None, reduction='mean'):\n" + " return 0\n" + "print(f\"val_bpb: {1.0:.6f}\")\n" + ) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text(original, encoding="utf-8") + + config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, + timeout_sec=1, + ).normalized(root) + runner = 
AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.0) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + evaluator = AutoResearchRewardEvaluator(problem_type="autoresearch", log_dir=str(bootstrap.run_dir)) + state = AutoResearchState( + timestep=0, + construction=[], + code=original, + value=-1.0, + baseline_val_bpb=1.0, + current_best_val_bpb=1.0, + ) + + payload = "<<<<<<< SEARCH\nTOTAL_BATCH_SIZE = 8\n=======\nTOTAL_BATCH_SIZE = 7\n>>>>>>> REPLACE" + result = evaluator.get_reward(payload, state) + + self.assertEqual(result["metrics"]["candidate_status"], "preflight_failed") + history_path = Path(config.run_dir) / "history.jsonl" + history = json.loads(history_path.read_text(encoding="utf-8").strip()) + self.assertEqual(history["status"], "preflight_failed") + self.assertEqual(history["failure_stage"], "batch_divisibility") + manifest_path = next((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json")) + manifest = json.loads(manifest_path.read_text(encoding="utf-8")) + self.assertEqual(manifest["evaluation"]["status"], "preflight_failed") + self.assertEqual(manifest["preflight"]["stage"], "batch_divisibility") + self.assertTrue((Path(manifest["preflight_path"])).exists()) def test_concurrent_reward_calls_serialize_inner_evaluations(self) -> None: class FakeRunner: @@ -144,8 +224,26 @@ def __init__(self) -> None: self.lock = threading.Lock() self.active = 0 self.max_seen = 0 + self._workspace_index = 0 + + def prepare_candidate_workspace(self, candidate, step: int, prefix: str = "candidate") -> Path: + workspace = Path(tempfile.mkdtemp()) / f"{step:04d}_{prefix}_{self._workspace_index}" + self._workspace_index += 1 + workspace.mkdir(parents=True, exist_ok=True) + (workspace / "train.py").write_text(candidate.train_py, encoding="utf-8") + return workspace + + def preflight_candidate(self, workspace: Path, candidate): + from ttt_autoresearch.runner import PreflightResult + + return 
PreflightResult(ok=True, stage="ok", reason="ok", details={}) + + def write_json_artifact(self, path: Path, payload: dict[str, object]) -> Path: + path.write_text("{}", encoding="utf-8") + return path - def run_candidate(self, **_: object) -> RunResult: + def run_candidate(self, **kwargs: object) -> RunResult: + workspace = kwargs["workspace"] with self.lock: self.active += 1 self.max_seen = max(self.max_seen, self.active) @@ -160,7 +258,7 @@ def run_candidate(self, **_: object) -> RunResult: stdout_path=Path("stdout.log"), stderr_path=Path("stderr.log"), elapsed_sec=0.1, - workspace_path=Path("."), + workspace_path=Path(workspace), metrics_path=None, command=["python", "train.py"], returncode=0, @@ -189,7 +287,7 @@ def read_text(self, _: Path, max_chars: int = 4000) -> str: runner = FakeRunner() AutoResearchRewardEvaluator.configure(bootstrap, runner) # type: ignore[arg-type] evaluator = AutoResearchRewardEvaluator(problem_type="autoresearch", log_dir=".") - payload = '{"summary":"improve","rationale":"lower loss","train_py":"print(1)\\n"}' + payload = "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE" def make_state() -> AutoResearchState: return AutoResearchState( diff --git a/tests/test_runner.py b/tests/test_runner.py index cd6b10cb..ec511d3b 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -7,13 +7,34 @@ import os from ttt_autoresearch.config import TTTAutoResearchConfig -from ttt_autoresearch.runner import AutoResearchRunner, parse_patch_candidate, parse_val_bpb +from ttt_autoresearch.runner import AutoResearchRunner, parse_patch_candidate_for_state, parse_val_bpb class RunnerTests(unittest.TestCase): - def test_parse_candidate_rejects_unknown_keys(self) -> None: + def test_parse_candidate_rejects_legacy_json(self) -> None: with self.assertRaises(ValueError): - parse_patch_candidate('{"summary":"s","rationale":"r","train_py":"x","prepare_py":"bad"}') + 
parse_patch_candidate_for_state('{"summary":"s","rationale":"r","train_py":"x"}', "x") + + def test_parse_candidate_rejects_raw_python(self) -> None: + with self.assertRaises(ValueError): + parse_patch_candidate_for_state("print(1)\n", "print(0)\n") + + def test_parse_candidate_accepts_search_replace_patch(self) -> None: + candidate = parse_patch_candidate_for_state( + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE", + "print(0)\n", + ) + self.assertEqual(candidate.candidate_format, "search_replace_patch") + self.assertEqual(candidate.patch_block_count, 1) + self.assertEqual(candidate.train_py, "print(1)\n") + + def test_parse_candidate_extracts_patch_from_wrapper_text(self) -> None: + candidate = parse_patch_candidate_for_state( + "Here is the patch\n<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE\nDone.", + "print(0)\n", + ) + self.assertEqual(candidate.candidate_format, "search_replace_patch_extracted") + self.assertEqual(candidate.train_py, "print(1)\n") def test_parse_val_bpb(self) -> None: stdout = "---\nval_bpb: 0.997900\n" @@ -42,6 +63,92 @@ def test_runner_reads_metric_and_status(self) -> None: self.assertAlmostEqual(result.val_bpb, 1.25) self.assertTrue((Path(config.run_dir) / "baseline" / "train.py").exists()) + def test_preflight_rejects_invalid_batch_divisibility(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + original = ( + "from prepare import MAX_SEQ_LEN\n" + "TOTAL_BATCH_SIZE = 8\n" + "DEVICE_BATCH_SIZE = 2\n" + "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" + "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" + "class GPT:\n" + " def forward(self, idx, targets=None, reduction='mean'):\n" + " return 0\n" + "print(f\"val_bpb: {1.0:.6f}\")\n" + ) + (root / "train.py").write_text(original, encoding="utf-8") + config = 
TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + candidate = parse_patch_candidate_for_state( + "<<<<<<< SEARCH\nTOTAL_BATCH_SIZE = 8\n=======\nTOTAL_BATCH_SIZE = 7\n>>>>>>> REPLACE", + original, + ) + workspace = runner.prepare_candidate_workspace(candidate, step=0) + preflight = runner.preflight_candidate(workspace, candidate) + self.assertFalse(preflight.ok) + self.assertEqual(preflight.stage, "batch_divisibility") + + def test_preflight_rejects_missing_val_bpb_summary(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + original = ( + "from prepare import MAX_SEQ_LEN\n" + "TOTAL_BATCH_SIZE = 8\n" + "DEVICE_BATCH_SIZE = 1\n" + "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" + "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" + "class GPT:\n" + " def forward(self, idx, targets=None, reduction='mean'):\n" + " return 0\n" + "print('val_bpb: 1.0')\n" + ) + (root / "train.py").write_text(original, encoding="utf-8") + config = TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + candidate = parse_patch_candidate_for_state( + "<<<<<<< SEARCH\nprint('val_bpb: 1.0')\n=======\nprint('done')\n>>>>>>> REPLACE", + original, + ) + workspace = runner.prepare_candidate_workspace(candidate, step=0) + preflight = runner.preflight_candidate(workspace, candidate) + self.assertFalse(preflight.ok) + self.assertEqual(preflight.stage, "summary_output") + + def test_preflight_rejects_overly_large_patch_scope(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + original = ( + 
"from prepare import MAX_SEQ_LEN\n" + "TOTAL_BATCH_SIZE = 2048\n" + "DEVICE_BATCH_SIZE = 1\n" + "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" + "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" + "class GPT:\n" + " def forward(self, idx, targets=None, reduction='mean'):\n" + " return 0\n" + "print(f\"val_bpb: {1.0:.6f}\")\n" + ) + (root / "train.py").write_text(original, encoding="utf-8") + config = TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + search_block = "print(f\"val_bpb: {1.0:.6f}\")\n" + replacement = "".join(f"print({idx})\n" for idx in range(250)) + candidate = parse_patch_candidate_for_state( + f"<<<<<<< SEARCH\n{search_block}=======\n{replacement}>>>>>>> REPLACE", + original, + ) + workspace = runner.prepare_candidate_workspace(candidate, step=0) + preflight = runner.preflight_candidate(workspace, candidate) + self.assertFalse(preflight.ok) + self.assertEqual(preflight.stage, "edit_scope") + def test_build_bootstrap_prefers_stored_baseline_snapshot(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) @@ -108,16 +215,26 @@ def test_unknown_model_requires_explicit_renderer(self) -> None: ).normalized(root) self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") + def test_kimi_model_is_primary_supported_renderer(self) -> None: + config = TTTAutoResearchConfig( + model_name="moonshotai/Kimi-K2.5", + execution_backend="local", + ).normalized(Path(".")) + self.assertEqual(config.renderer_name, "qwen3") + def test_group_defaults_reflect_medium_preset(self) -> None: config = TTTAutoResearchConfig().normalized(Path(".")) self.assertEqual(config.model_name, "openai/gpt-oss-120b") - self.assertEqual(config.execution_backend, "runpod") + self.assertEqual(config.execution_backend, "hyperbolic") self.assertEqual(config.max_steps, 12) self.assertEqual(config.groups_per_step, 2) self.assertEqual(config.samples_per_step, 8) - 
self.assertEqual(config.max_concurrent_evaluations, 16) + self.assertEqual(config.max_concurrent_evaluations, 8) self.assertEqual(config.renderer_name, "gpt_oss_high_reasoning") - self.assertEqual(config.runpod_gpu_type_ids, ["NVIDIA H100 PCIe"]) + self.assertEqual(config.gpu_devices, ["0", "1", "2", "3", "4", "5", "6", "7"]) + self.assertIsNone(config.wandb_project) + self.assertIn("HF_TOKEN", config.hyperbolic_forward_env_vars) + self.assertNotIn("WANDB_API_KEY", config.hyperbolic_forward_env_vars) def test_gpu_devices_are_normalized(self) -> None: config = TTTAutoResearchConfig(gpu_devices=[0, 3, 7]).normalized(Path(".")) diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py index a72a4ce7..8ae5b492 100644 --- a/ttt_autoresearch/cli.py +++ b/ttt_autoresearch/cli.py @@ -2,11 +2,19 @@ import argparse import asyncio +import json from pathlib import Path +import subprocess import sys from ttt_autoresearch.config import TTTAutoResearchConfig, load_config, write_resolved_config +from ttt_autoresearch.discover_compat import ( + patch_transformers_kimi_trust_remote_code, + patch_ttt_discover_kimi_tokenizer, + patch_ttt_discover_no_wandb_bug, +) from ttt_autoresearch.env import AutoResearchDiscoverEnv +from ttt_autoresearch.hyperbolic import HyperbolicPool from ttt_autoresearch.reward import AutoResearchRewardEvaluator from ttt_autoresearch.runner import AutoResearchRunner @@ -28,6 +36,28 @@ def main(argv: list[str] | None = None) -> int: config_path = _resolve_config_path(args.config, repo_root) config = load_config(config_path, repo_root=repo_root) config = _apply_overrides(config, args) + run_dir = Path(config.run_dir) + + if config.execution_backend == "hyperbolic" and config.hyperbolic_detached_controller: + run_dir.mkdir(parents=True, exist_ok=True) + launcher = HyperbolicPool(repo_root=repo_root, run_dir=run_dir, config=config) + launch_info = launcher.launch_detached_controller() + write_resolved_config(run_dir / "resolved_config.json", config) + if 
config.hyperbolic_local_mirror: + mirror_info = _start_hyperbolic_mirror(config=config, run_dir=run_dir, launch_info=launch_info) + launch_info.update(mirror_info) + (run_dir / "hyperbolic_launch.json").write_text( + json.dumps(launch_info, indent=2, sort_keys=True) + "\n", + encoding="utf-8", + ) + print(f"Started detached Hyperbolic controller on {config.hyperbolic_ssh_host}.") + print(f"Remote run dir: {launch_info['remote_run_dir']}") + print(f"Remote log: {launch_info['remote_log_path']}") + if config.hyperbolic_local_mirror: + print(f"Local mirror: {launch_info['local_mirror_dir']}") + return 0 + + patch_transformers_kimi_trust_remote_code() try: from ttt_discover.rl.train import Config as RLConfig, main as discover_main @@ -39,7 +69,9 @@ def main(argv: list[str] | None = None) -> int: ) raise AssertionError from exc - run_dir = Path(config.run_dir) + patch_ttt_discover_no_wandb_bug() + patch_ttt_discover_kimi_tokenizer() + runner = AutoResearchRunner(repo_root=repo_root, config=config, run_dir=run_dir) try: baseline_result = runner.load_existing_baseline_result() @@ -120,5 +152,48 @@ def _apply_overrides(config: TTTAutoResearchConfig, args: argparse.Namespace) -> return TTTAutoResearchConfig(**updated).normalized(Path(__file__).resolve().parent.parent) +def _start_hyperbolic_mirror(config: TTTAutoResearchConfig, run_dir: Path, launch_info: dict[str, str]) -> dict[str, str]: + mirror_dir = Path(config.hyperbolic_local_mirror_dir) if config.hyperbolic_local_mirror_dir else run_dir / "mirror" + mirror_dir.mkdir(parents=True, exist_ok=True) + log_path = run_dir / "hyperbolic_mirror.log" + with log_path.open("ab") as handle: + process = subprocess.Popen( + [ + sys.executable, + "-m", + "ttt_autoresearch.hyperbolic_mirror", + "--host", + str(config.hyperbolic_ssh_host), + "--port", + str(config.hyperbolic_ssh_port), + "--user", + str(config.hyperbolic_ssh_user), + "--remote-run-dir", + launch_info["remote_run_dir"], + "--remote-launch-dir", + 
launch_info["remote_launch_dir"], + "--remote-exitcode-path", + launch_info["remote_exitcode_path"], + "--local-dest", + str(mirror_dir), + "--interval-sec", + str(config.hyperbolic_sync_interval_sec), + *( + ["--identity-file", config.hyperbolic_ssh_private_key_path] + if config.hyperbolic_ssh_private_key_path + else [] + ), + ], + stdout=handle, + stderr=subprocess.STDOUT, + start_new_session=True, + ) + return { + "local_mirror_dir": str(mirror_dir), + "local_mirror_log_path": str(log_path), + "local_mirror_pid": str(process.pid), + } + + if __name__ == "__main__": raise SystemExit(main(sys.argv[1:])) diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index e6fdb548..33890f36 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -42,14 +42,28 @@ class TTTAutoResearchConfig: kl_penalty_coef: float = 0.1 phase1_max_tokens: int = 26000 save_every: int = 2 - wandb_project: str | None = "autoresearch-ttt-discover" + wandb_project: str | None = None num_cpus_per_task: int = 0 eval_timeout: int | None = None local_model_path: str | None = None keep_history: int = 6 - max_concurrent_evaluations: int = 16 + max_concurrent_evaluations: int = 8 gpu_devices: list[str] | None = None - execution_backend: str = "runpod" + execution_backend: str = "hyperbolic" + hyperbolic_ssh_host: str | None = None + hyperbolic_ssh_port: int = 22 + hyperbolic_ssh_user: str = "ubuntu" + hyperbolic_ssh_private_key_path: str | None = None + hyperbolic_repo_root: str = "/home/ubuntu/autoresearch" + hyperbolic_prepare_num_shards: int = 10 + hyperbolic_bootstrap_timeout_sec: int = 7200 + hyperbolic_bootstrap_commands: list[str] | None = None + hyperbolic_detached_controller: bool = True + hyperbolic_remote_run_dir: str | None = None + hyperbolic_forward_env_vars: list[str] | None = None + hyperbolic_local_mirror: bool = True + hyperbolic_sync_interval_sec: int = 30 + hyperbolic_local_mirror_dir: str | None = None runpod_api_key_env: str = "RUNPOD_API_KEY" 
runpod_api_base: str = "https://rest.runpod.io/v1" runpod_cloud_type: str = "COMMUNITY" @@ -77,8 +91,11 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": run_dir = _resolve_path(self.run_dir, repo_root) if self.run_dir else repo_root / "runs" / datetime.now().strftime("%Y%m%d_%H%M%S") experiment_name = self.experiment_name or run_dir.name execution_backend = self.execution_backend.lower() - if execution_backend not in {"local", "runpod"}: - raise ValueError("execution_backend must be either 'local' or 'runpod'.") + if execution_backend not in {"local", "runpod", "hyperbolic"}: + raise ValueError("execution_backend must be one of 'local', 'runpod', or 'hyperbolic'.") + gpu_devices = _normalize_string_list(self.gpu_devices) + if execution_backend == "hyperbolic" and not gpu_devices: + gpu_devices = [str(index) for index in range(8)] return TTTAutoResearchConfig( model_name=self.model_name, provider=self.provider, @@ -106,8 +123,32 @@ def normalized(self, repo_root: Path) -> "TTTAutoResearchConfig": local_model_path=_resolve_optional_path_str(self.local_model_path, repo_root), keep_history=self.keep_history, max_concurrent_evaluations=max(1, int(self.max_concurrent_evaluations)), - gpu_devices=_normalize_string_list(self.gpu_devices), + gpu_devices=gpu_devices, execution_backend=execution_backend, + hyperbolic_ssh_host=self.hyperbolic_ssh_host, + hyperbolic_ssh_port=max(1, int(self.hyperbolic_ssh_port)), + hyperbolic_ssh_user=self.hyperbolic_ssh_user, + hyperbolic_ssh_private_key_path=_resolve_optional_path_str(self.hyperbolic_ssh_private_key_path, repo_root), + hyperbolic_repo_root=self.hyperbolic_repo_root.rstrip("/"), + hyperbolic_prepare_num_shards=max(2, int(self.hyperbolic_prepare_num_shards)), + hyperbolic_bootstrap_timeout_sec=max(300, int(self.hyperbolic_bootstrap_timeout_sec)), + hyperbolic_bootstrap_commands=_normalize_command(self.hyperbolic_bootstrap_commands), + hyperbolic_detached_controller=bool(self.hyperbolic_detached_controller), 
+ hyperbolic_remote_run_dir=self.hyperbolic_remote_run_dir, + hyperbolic_forward_env_vars=_normalize_string_list(self.hyperbolic_forward_env_vars) + or [ + "OPENAI_API_KEY", + "OPENAI_BASE_URL", + "OPENAI_API_BASE", + "TINKER_API_KEY", + "TINKER_BASE_URL", + "TINKER_PROVIDER", + "TM_API_KEY", + "HF_TOKEN", + ], + hyperbolic_local_mirror=bool(self.hyperbolic_local_mirror), + hyperbolic_sync_interval_sec=max(5, int(self.hyperbolic_sync_interval_sec)), + hyperbolic_local_mirror_dir=_resolve_optional_path_str(self.hyperbolic_local_mirror_dir, repo_root), runpod_api_key_env=self.runpod_api_key_env, runpod_api_base=self.runpod_api_base.rstrip("/"), runpod_cloud_type=self.runpod_cloud_type.upper(), @@ -176,6 +217,8 @@ def subprocess_env(self) -> dict[str, str]: def infer_renderer_name(model_name: str) -> str | None: lowered = model_name.lower() + if "kimi-k2" in lowered or "moonshotai/kimi" in lowered: + return "qwen3" if "qwen" in lowered: if "instruct" in lowered: return "qwen3_instruct" diff --git a/ttt_autoresearch/discover_compat.py b/ttt_autoresearch/discover_compat.py index 3e87eaa1..4ca9667a 100644 --- a/ttt_autoresearch/discover_compat.py +++ b/ttt_autoresearch/discover_compat.py @@ -86,3 +86,124 @@ def __init__(self, renderer: Any, initial_state: State, sampler: Any, config: An self.eval_timeout = getattr(config, "eval_timeout", 0) self.num_cpus_per_task = getattr(config, "num_cpus_per_task", 0) + +def patch_ttt_discover_no_wandb_bug() -> None: + """Pad discover's multiplex logger so W&B-optional runs do not crash. + + Upstream do_sync_training incorrectly checks ``len(loggers) >= 2`` and then + indexes ``loggers[2]``. When W&B is disabled, setup_logging only creates two + loggers, so the first train step crashes. We pad the logger list with no-op + loggers until index 2 is always safe. 
+ """ + + try: + from ttt_discover.tinker_utils import ml_log + except ImportError: + return + + if getattr(ml_log, "_autoresearch_no_wandb_patch", False): + return + + class _NullLogger(ml_log.Logger): + def log_hparams(self, config: Any) -> None: + return None + + def log_metrics(self, metrics: dict[str, Any], step: int | None = None) -> None: + return None + + original_setup_logging = ml_log.setup_logging + + def patched_setup_logging(*args: Any, **kwargs: Any): + logger = original_setup_logging(*args, **kwargs) + if hasattr(logger, "loggers"): + while len(logger.loggers) < 3: + logger.loggers.append(_NullLogger()) + return logger + + ml_log.setup_logging = patched_setup_logging + ml_log._autoresearch_no_wandb_patch = True + + +def patch_ttt_discover_kimi_tokenizer() -> None: + """Teach upstream discover tokenizers to trust_remote_code for Kimi K2.5. + + Upstream currently special-cases only ``moonshotai/Kimi-K2-Thinking`` in some + tokenizer paths. ``moonshotai/Kimi-K2.5`` requires the same trust_remote_code + handling, otherwise detached runs die on an interactive prompt. 
+ """ + + def _wrap_get_tokenizer(module: Any, sentinel_name: str) -> None: + if getattr(module, sentinel_name, False): + return + + original_get_tokenizer = module.get_tokenizer + + def patched_get_tokenizer(model_name: str): + if model_name == "moonshotai/Kimi-K2.5": + import os + from transformers.models.auto.tokenization_auto import AutoTokenizer + + if os.path.isdir(model_name): + return AutoTokenizer.from_pretrained( + model_name, + use_fast=True, + local_files_only=True, + trust_remote_code=True, + ) + return AutoTokenizer.from_pretrained( + model_name, + use_fast=True, + trust_remote_code=True, + ) + return original_get_tokenizer(model_name) + + module.get_tokenizer = patched_get_tokenizer + setattr(module, sentinel_name, True) + + try: + from ttt_discover.tinker_utils import misc_utils + except ImportError: + misc_utils = None + if misc_utils is not None: + _wrap_get_tokenizer(misc_utils, "_autoresearch_kimi_patch") + + try: + from ttt_discover.tinker_utils import renderers + except ImportError: + renderers = None + if renderers is not None: + _wrap_get_tokenizer(renderers, "_autoresearch_kimi_patch") + + try: + from ttt_discover.tinker_utils import dataset_builder + except ImportError: + dataset_builder = None + if dataset_builder is not None and misc_utils is not None: + dataset_builder.get_tokenizer = misc_utils.get_tokenizer + + +def patch_transformers_kimi_trust_remote_code() -> None: + """Force trust_remote_code=True for Kimi K2.5 tokenizer loads. + + This catches code paths that bypass discover's helper and call the + Transformers auto-tokenizer directly. 
+ """ + + try: + from transformers.models.auto.tokenization_auto import AutoTokenizer + except ImportError: + return + + if getattr(AutoTokenizer, "_autoresearch_kimi_trust_patch", False): + return + + original_from_pretrained = AutoTokenizer.from_pretrained + + def patched_from_pretrained(pretrained_model_name_or_path: Any, *args: Any, **kwargs: Any): + model_name = str(pretrained_model_name_or_path) + if model_name == "moonshotai/Kimi-K2.5": + kwargs.setdefault("trust_remote_code", True) + return original_from_pretrained(pretrained_model_name_or_path, *args, **kwargs) + + AutoTokenizer.from_pretrained = patched_from_pretrained + AutoTokenizer._autoresearch_kimi_trust_patch = True diff --git a/ttt_autoresearch/env.py b/ttt_autoresearch/env.py index 63c2e08e..c9eff46e 100644 --- a/ttt_autoresearch/env.py +++ b/ttt_autoresearch/env.py @@ -7,9 +7,9 @@ from ttt_autoresearch.config import BootstrapContext from ttt_autoresearch.discover_compat import Environment, State, VerifyResult -from ttt_autoresearch.prompt_builder import build_rollout_prompt +from ttt_autoresearch.prompt_builder import build_prompt_for_state from ttt_autoresearch.reward import AutoResearchRewardEvaluator -from ttt_autoresearch.runner import parse_patch_candidate +from ttt_autoresearch.runner import parse_patch_candidate_for_state class AutoResearchState(State): def __init__( @@ -164,7 +164,7 @@ def is_maximize(self) -> bool: return False def _get_code_languages(self) -> list[str]: - return ["json"] + return ["python"] def _should_keep_code_separators(self) -> bool: return False @@ -177,25 +177,11 @@ def get_question(self) -> str: target = self.bootstrap.config.target_val_bpb if target is None: target = state.current_best_val_bpb - state_ctx = state.to_prompt(target, metric_name="val_bpb", maximize=False, language="python") - return build_rollout_prompt( - state_ctx=state_ctx, - construction_section=( - "You may want to start your search from the current training script shown above.\n" - "This is 
the current starting point selected by the search procedure.\n" - "You are encouraged to explore meaningfully different directions if the current approach appears saturated." - ), - code_section=( - "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" - "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" - "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" - "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." - ), - ) + return build_prompt_for_state(state, target) def check_format(self, parsed_code: str) -> bool: try: - parse_patch_candidate(parsed_code) + parse_patch_candidate_for_state(parsed_code, self.initial_state.current_train_py) except ValueError: return False return True @@ -204,7 +190,7 @@ async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: if not self.check_format(parsed_code): return VerifyResult( reward=0.0, - msg="Invalid candidate JSON.", + msg="Invalid candidate train.py patch payload.", correctness=0.0, raw_score=float(self.initial_state.current_best_val_bpb), result_construction=[], @@ -225,7 +211,7 @@ async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: ) def _create_next_state(self, step_idx: int, parsed_code: str, outs: VerifyResult) -> AutoResearchState: - candidate = parse_patch_candidate(parsed_code) + candidate = parse_patch_candidate_for_state(parsed_code, self.initial_state.current_train_py) parent_best = self.initial_state.current_best_val_bpb new_best = min(parent_best, outs.raw_score) if outs.raw_score is not None else parent_best return AutoResearchState( diff --git a/ttt_autoresearch/hyperbolic.py b/ttt_autoresearch/hyperbolic.py new file mode 100644 index 00000000..9bc02f9d --- /dev/null +++ b/ttt_autoresearch/hyperbolic.py @@ -0,0 +1,491 @@ +from __future__ import annotations + +from 
dataclasses import dataclass +from pathlib import Path +import json +import os +import shlex +import subprocess +import tarfile +import tempfile +import threading +import time +import uuid + +from ttt_autoresearch.config import TTTAutoResearchConfig + + +class HyperbolicError(RuntimeError): + pass + + +@dataclass(slots=True) +class RemoteExecutionResult: + stdout: str + stderr: str + returncode: int | None + elapsed_sec: float + + +class HyperbolicPool: + def __init__(self, repo_root: Path, run_dir: Path, config: TTTAutoResearchConfig) -> None: + self.repo_root = repo_root + self.run_dir = run_dir + self.config = config + self.repo_archive_path = self.run_dir / "hyperbolic_repo_bundle.tar.gz" + self.repo_archive_lock = threading.Lock() + self.bootstrap_lock = threading.Lock() + self.bootstrap_complete = False + self._validate_config() + self._validate_ssh_key() + self._write_repo_archive() + + def execute_workspace( + self, + workspace: Path, + command: list[str], + env: dict[str, str], + timeout_sec: int, + label: str, + ) -> RemoteExecutionResult: + self._ensure_node_ready() + return self._run_workspace_on_node(workspace, command, env, timeout_sec, label) + + def close(self) -> None: + return None + + def launch_detached_controller(self) -> dict[str, str]: + self._ensure_node_ready() + self._assert_no_active_remote_runs() + run_name = Path(self.run_dir).name + remote_run_dir = self.config.hyperbolic_remote_run_dir or f"{self.config.hyperbolic_repo_root}/runs/{run_name}" + remote_launch_dir = f"{self.config.hyperbolic_repo_root}/runs/launches/{run_name}" + remote_config_path = f"{remote_launch_dir}/remote_config.yaml" + remote_log_path = f"{remote_launch_dir}/controller.log" + remote_pid_path = f"{remote_launch_dir}/controller.pid" + remote_exitcode_path = f"{remote_launch_dir}/controller.exitcode" + remote_metadata_path = f"{remote_launch_dir}/launch.json" + remote_start_path = f"{remote_launch_dir}/start_controller.sh" + + remote_config = 
self._build_remote_controller_config(remote_run_dir) + with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False, encoding="utf-8") as handle: + handle.write(_dump_yaml_like(remote_config)) + local_config_path = Path(handle.name) + try: + self._run_ssh(f"mkdir -p {shlex.quote(remote_launch_dir)} {shlex.quote(remote_run_dir)}", timeout=60, check=True) + self._upload_file(local_config_path, remote_config_path) + finally: + local_config_path.unlink(missing_ok=True) + + forwarded_env = {} + for name in self.config.hyperbolic_forward_env_vars or []: + value = os.environ.get(name) + if value: + forwarded_env[name] = value + # Tinker clients expect TINKER_API_KEY, but many local shells are configured + # with OPENAI_API_KEY only. Mirror them so detached remote runs keep working. + if "TINKER_API_KEY" not in forwarded_env and os.environ.get("OPENAI_API_KEY"): + forwarded_env["TINKER_API_KEY"] = os.environ["OPENAI_API_KEY"] + if "OPENAI_API_KEY" not in forwarded_env and os.environ.get("TINKER_API_KEY"): + forwarded_env["OPENAI_API_KEY"] = os.environ["TINKER_API_KEY"] + if self.config.provider: + forwarded_env.setdefault("TINKER_PROVIDER", self.config.provider) + if self.config.api_base: + forwarded_env.setdefault("OPENAI_BASE_URL", self.config.api_base) + forwarded_env.setdefault("OPENAI_API_BASE", self.config.api_base) + forwarded_env.setdefault("TINKER_BASE_URL", self.config.api_base) + start_script = "\n".join( + [ + "#!/usr/bin/env bash", + "set -euo pipefail", + f"mkdir -p {shlex.quote(remote_launch_dir)}", + f"if [ -f {shlex.quote(remote_pid_path)} ] && kill -0 \"$(cat {shlex.quote(remote_pid_path)})\" 2>/dev/null; then", + f" echo 'Controller already running at {remote_pid_path}'", + " exit 1", + "fi", + 'export PATH="$HOME/.local/bin:$PATH"', + *[f"export {name}={shlex.quote(value)}" for name, value in sorted(forwarded_env.items())], + f"cd {shlex.quote(self.config.hyperbolic_repo_root)}", + "nohup bash -lc " + + shlex.quote( + f"cd 
{self.config.hyperbolic_repo_root} && " + f"uv run python run_ttt_discover.py --config {remote_config_path} " + f"> {remote_log_path} 2>&1; " + f"rc=$?; printf '%s' \"$rc\" > {remote_exitcode_path}" + ) + + " < /dev/null > /dev/null 2>&1 &", + f"printf '%s' \"$!\" > {shlex.quote(remote_pid_path)}", + f"cat > {shlex.quote(remote_metadata_path)} <<'JSON'", + json.dumps( + { + "remote_run_dir": remote_run_dir, + "remote_config_path": remote_config_path, + "remote_log_path": remote_log_path, + "remote_pid_path": remote_pid_path, + "remote_exitcode_path": remote_exitcode_path, + "remote_start_path": remote_start_path, + }, + indent=2, + sort_keys=True, + ), + "JSON", + ] + ) + "\n" + with tempfile.NamedTemporaryFile("w", suffix=".sh", delete=False, encoding="utf-8") as handle: + handle.write(start_script) + local_start_path = Path(handle.name) + try: + self._upload_file(local_start_path, remote_start_path) + finally: + local_start_path.unlink(missing_ok=True) + self._run_ssh( + f"chmod +x {shlex.quote(remote_start_path)} && {shlex.quote(remote_start_path)}", + timeout=30, + check=True, + ) + return { + "remote_run_dir": remote_run_dir, + "remote_config_path": remote_config_path, + "remote_log_path": remote_log_path, + "remote_pid_path": remote_pid_path, + "remote_exitcode_path": remote_exitcode_path, + "remote_launch_dir": remote_launch_dir, + "remote_start_path": remote_start_path, + } + + def _assert_no_active_remote_runs(self) -> None: + repo_root = self.config.hyperbolic_repo_root + controller_pattern = f"run_ttt_discover.py --config {repo_root}/runs/launches/" + train_pattern = f"{repo_root}/.venv/bin/python3 train.py" + script = "\n".join( + [ + "set -euo pipefail", + f"controller_matches=$(pgrep -af {shlex.quote(controller_pattern)} || true)", + f"train_matches=$(pgrep -af {shlex.quote(train_pattern)} || true)", + 'if [ -n "$controller_matches" ] || [ -n "$train_matches" ]; then', + " echo 'Detected active AutoResearch processes already running on the Hyperbolic 
node.'", + " if [ -n \"$controller_matches\" ]; then", + " echo 'Controllers:'", + " echo \"$controller_matches\"", + " fi", + " if [ -n \"$train_matches\" ]; then", + " echo 'Train jobs:'", + " echo \"$train_matches\"", + " fi", + " exit 12", + "fi", + ] + ) + completed = self._run_ssh(script, timeout=30, check=False) + if completed.returncode != 0: + details = completed.stdout.strip() or completed.stderr.strip() + raise HyperbolicError( + "Refusing to launch because another detached AutoResearch run appears to still be active on the " + f"Hyperbolic node.\n{details}" + ) + + def _validate_config(self) -> None: + if not self.config.hyperbolic_ssh_host: + raise HyperbolicError( + "hyperbolic_ssh_host is not set. Create an on-demand Hyperbolic H100 node and set its SSH host in the config." + ) + + def _build_remote_controller_config(self, remote_run_dir: str) -> dict[str, object]: + config_dict = self.config.to_dict() + config_dict["execution_backend"] = "local" + config_dict["run_dir"] = remote_run_dir + config_dict["hyperbolic_detached_controller"] = False + config_dict["gpu_devices"] = self.config.gpu_devices or [str(index) for index in range(8)] + config_dict["max_concurrent_evaluations"] = min( + int(self.config.max_concurrent_evaluations), + len(config_dict["gpu_devices"]), + ) + return config_dict + + def _validate_ssh_key(self) -> None: + key_path = self.config.hyperbolic_ssh_private_key_path + if key_path: + if not Path(key_path).exists(): + raise HyperbolicError(f"SSH private key not found at {key_path}") + return + default_keys = [Path.home() / ".ssh" / name for name in ("id_ed25519", "id_rsa", "id_ecdsa")] + has_agent = bool(os.environ.get("SSH_AUTH_SOCK")) + has_default = any(key.exists() for key in default_keys) + if not has_agent and not has_default: + raise HyperbolicError( + "No SSH private key configured for Hyperbolic. " + "Set hyperbolic_ssh_private_key_path, ensure a default SSH key exists, or run an ssh-agent." 
+ ) + + def _ensure_node_ready(self) -> None: + with self.bootstrap_lock: + if self.bootstrap_complete: + return + self._wait_for_ssh() + self._bootstrap_node() + self.bootstrap_complete = True + + def _wait_for_ssh(self) -> None: + deadline = time.time() + self.config.hyperbolic_bootstrap_timeout_sec + while time.time() < deadline: + try: + completed = self._run_ssh("true", timeout=30, check=False) + except HyperbolicError: + time.sleep(5) + continue + if completed.returncode == 0: + return + time.sleep(5) + raise HyperbolicError("Hyperbolic node never accepted SSH before the bootstrap timeout.") + + def _bootstrap_node(self) -> None: + remote_archive = "/tmp/autoresearch_repo_bundle.tar.gz" + self._upload_file(self.repo_archive_path, remote_archive) + repo_root = self.config.hyperbolic_repo_root + bootstrap_commands = self.config.hyperbolic_bootstrap_commands or [ + "python3 -m pip install --user --upgrade uv", + "cd {repo_root} && uv sync", + "cd {repo_root} && uv run prepare.py --num-shards {prepare_num_shards}", + ] + rendered = [ + command.format( + repo_root=repo_root, + prepare_num_shards=self.config.hyperbolic_prepare_num_shards, + ) + for command in bootstrap_commands + ] + script_lines = [ + "set -euo pipefail", + 'export PATH="$HOME/.local/bin:$PATH"', + f"rm -rf {shlex.quote(repo_root)}", + f"mkdir -p {shlex.quote(repo_root)}", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)}", + f"if [ ! 
-f {shlex.quote(repo_root)}/pyproject.toml ]; then rm -rf {shlex.quote(repo_root)}/* && tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)} --strip-components=1; fi", + f"test -f {shlex.quote(repo_root)}/pyproject.toml", + f"test -f {shlex.quote(repo_root)}/prepare.py", + ] + script_lines.extend(rendered) + self._run_ssh( + "\n".join(script_lines), + timeout=self.config.hyperbolic_bootstrap_timeout_sec, + check=True, + ) + + def _run_workspace_on_node( + self, + workspace: Path, + command: list[str], + env: dict[str, str], + timeout_sec: int, + label: str, + ) -> RemoteExecutionResult: + remote_workspace = f"{self.config.hyperbolic_repo_root}/../jobs/{label}-{uuid.uuid4().hex[:8]}" + remote_archive = f"/tmp/{uuid.uuid4().hex}.tar.gz" + local_archive = self._build_workspace_archive(workspace) + try: + self._upload_file(local_archive, remote_archive) + env_lines = [] + for key in sorted(env): + env_lines.append(f"export {key}={shlex.quote(env[key])}") + env_lines.append("export PYTHONUNBUFFERED=1") + command_str = " ".join(shlex.quote(part) for part in command) + script = "\n".join( + [ + "set -uo pipefail", + f"rm -rf {shlex.quote(remote_workspace)}", + f"mkdir -p {shlex.quote(remote_workspace)}", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)}", + f"if [ ! 
-f {shlex.quote(remote_workspace)}/train.py ]; then rm -rf {shlex.quote(remote_workspace)}/* && tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)} --strip-components=1; fi", + f"test -f {shlex.quote(remote_workspace)}/train.py", + f"cd {shlex.quote(remote_workspace)}", + *env_lines, + f"timeout --kill-after=30s {timeout_sec}s {command_str} > stdout.log 2> stderr.log", + "rc=$?", + 'printf "%s" "$rc" > .exit_code', + "exit 0", + ] + ) + start = time.time() + self._run_ssh(script, timeout=timeout_sec + 180, check=True) + elapsed = time.time() - start + stdout = self._download_text_file(f"{remote_workspace}/stdout.log") + stderr = self._download_text_file(f"{remote_workspace}/stderr.log") + exit_text = self._download_text_file(f"{remote_workspace}/.exit_code").strip() + if exit_text: + try: + returncode = int(exit_text) + except ValueError: + returncode = 1 + else: + returncode = 1 + metrics_json = self._download_text_file(f"{remote_workspace}/metrics.json") + if metrics_json: + (workspace / "metrics.json").write_text(metrics_json, encoding="utf-8") + return RemoteExecutionResult(stdout=stdout, stderr=stderr, returncode=returncode, elapsed_sec=elapsed) + finally: + try: + self._run_ssh( + "\n".join( + [ + "set -e", + f"rm -rf {shlex.quote(remote_workspace)}", + f"rm -f {shlex.quote(remote_archive)}", + ] + ), + timeout=60, + check=False, + ) + except HyperbolicError: + pass + local_archive.unlink(missing_ok=True) + + def _upload_file(self, local_path: Path, remote_path: str) -> None: + destination = f"{self.config.hyperbolic_ssh_user}@{self.config.hyperbolic_ssh_host}:{remote_path}" + try: + completed = subprocess.run( + self._scp_base_args() + [str(local_path), destination], + text=True, + capture_output=True, + timeout=600, + check=False, + ) + except subprocess.TimeoutExpired as exc: + raise HyperbolicError(f"Timed out uploading {local_path.name} to the Hyperbolic node.") from exc + if completed.returncode != 0: + raise 
HyperbolicError(completed.stderr.strip() or f"scp upload failed for {local_path.name}.") + + def _download_text_file(self, remote_path: str) -> str: + with tempfile.TemporaryDirectory() as tmpdir: + local_path = Path(tmpdir) / Path(remote_path).name + source = f"{self.config.hyperbolic_ssh_user}@{self.config.hyperbolic_ssh_host}:{remote_path}" + try: + completed = subprocess.run( + self._scp_base_args() + [source, str(local_path)], + text=True, + capture_output=True, + timeout=600, + check=False, + ) + except subprocess.TimeoutExpired as exc: + raise HyperbolicError(f"Timed out downloading {remote_path} from the Hyperbolic node.") from exc + if completed.returncode != 0: + return "" + return local_path.read_text(encoding="utf-8") + + def _run_ssh(self, script: str, timeout: int, check: bool) -> subprocess.CompletedProcess[str]: + try: + completed = subprocess.run( + self._ssh_base_args() + [f"bash -lc {shlex.quote(script)}"], + text=True, + capture_output=True, + timeout=timeout, + check=False, + ) + except subprocess.TimeoutExpired as exc: + raise HyperbolicError("Timed out waiting for a remote SSH command on the Hyperbolic node.") from exc + if check and completed.returncode != 0: + raise HyperbolicError(completed.stderr.strip() or completed.stdout.strip() or "Remote command failed on the Hyperbolic node.") + return completed + + def _ssh_base_args(self) -> list[str]: + args = [ + "ssh", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-o", + "ConnectTimeout=10", + ] + if self.config.hyperbolic_ssh_private_key_path: + args.extend(["-i", self.config.hyperbolic_ssh_private_key_path]) + args.extend( + [ + "-p", + str(self.config.hyperbolic_ssh_port), + f"{self.config.hyperbolic_ssh_user}@{self.config.hyperbolic_ssh_host}", + ] + ) + return args + + def _scp_base_args(self) -> list[str]: + args = [ + "scp", + "-O", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + 
"UserKnownHostsFile=/dev/null", + "-o", + "ConnectTimeout=10", + ] + if self.config.hyperbolic_ssh_private_key_path: + args.extend(["-i", self.config.hyperbolic_ssh_private_key_path]) + args.extend(["-P", str(self.config.hyperbolic_ssh_port)]) + return args + + def _write_repo_archive(self) -> None: + with self.repo_archive_lock: + with tarfile.open(self.repo_archive_path, "w:gz") as archive: + for path in self.repo_root.rglob("*"): + if not path.is_file(): + continue + rel = path.relative_to(self.repo_root) + if self._should_skip(rel): + continue + archive.add(path, arcname=str(rel)) + + def _build_workspace_archive(self, workspace: Path) -> Path: + fd, archive_path = tempfile.mkstemp(prefix="workspace_", suffix=".tar.gz") + os.close(fd) + archive_file = Path(archive_path) + with tarfile.open(archive_file, "w:gz") as archive: + for path in workspace.rglob("*"): + if not path.is_file(): + continue + rel = path.relative_to(workspace) + if self._should_skip(rel): + continue + archive.add(path, arcname=str(rel)) + return archive_file + + @staticmethod + def _should_skip(rel: Path) -> bool: + parts = rel.parts + if not parts: + return False + if parts[0] in {".git", "runs", "__pycache__", ".pytest_cache", ".venv"}: + return True + if rel.name in {"prompt.txt", "response.txt"}: + return True + return rel.suffix in {".pyc", ".pyo"} + + +def _dump_yaml_like(payload: dict[str, object]) -> str: + lines: list[str] = [] + for key, value in payload.items(): + if isinstance(value, list): + lines.append(f"{key}:") + for item in value: + if item is None: + rendered = "null" + elif isinstance(item, bool): + rendered = "true" if item else "false" + else: + rendered = json.dumps(item) + lines.append(f" - {rendered}") + continue + if value is None: + rendered = "null" + elif isinstance(value, bool): + rendered = "true" if value else "false" + elif isinstance(value, (int, float)): + rendered = str(value) + else: + rendered = json.dumps(value) + lines.append(f"{key}: {rendered}") + 
return "\n".join(lines) + "\n" diff --git a/ttt_autoresearch/hyperbolic_mirror.py b/ttt_autoresearch/hyperbolic_mirror.py new file mode 100644 index 00000000..187f5e24 --- /dev/null +++ b/ttt_autoresearch/hyperbolic_mirror.py @@ -0,0 +1,165 @@ +from __future__ import annotations + +import argparse +from pathlib import Path +import shutil +import subprocess +import sys +import time + + +def build_arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description="Mirror a remote Hyperbolic run directory back to the local machine.") + parser.add_argument("--host", required=True) + parser.add_argument("--port", type=int, default=22) + parser.add_argument("--user", default="ubuntu") + parser.add_argument("--identity-file") + parser.add_argument("--remote-run-dir", required=True) + parser.add_argument("--remote-launch-dir", required=True) + parser.add_argument("--remote-exitcode-path", required=True) + parser.add_argument("--local-dest", required=True) + parser.add_argument("--interval-sec", type=int, default=30) + return parser + + +def main(argv: list[str] | None = None) -> int: + parser = build_arg_parser() + args = parser.parse_args(argv) + + local_dest = Path(args.local_dest) + local_dest.mkdir(parents=True, exist_ok=True) + local_launch_dir = local_dest / "_remote_launch" + local_launch_dir.mkdir(parents=True, exist_ok=True) + + while True: + _sync_remote_dir( + host=args.host, + port=args.port, + user=args.user, + identity_file=args.identity_file, + remote_dir=args.remote_run_dir, + local_dir=local_dest, + ) + _sync_remote_dir( + host=args.host, + port=args.port, + user=args.user, + identity_file=args.identity_file, + remote_dir=args.remote_launch_dir, + local_dir=local_launch_dir, + ) + if _remote_file_exists( + host=args.host, + port=args.port, + user=args.user, + identity_file=args.identity_file, + remote_path=args.remote_exitcode_path, + ): + _sync_remote_dir( + host=args.host, + port=args.port, + user=args.user, + 
identity_file=args.identity_file, + remote_dir=args.remote_run_dir, + local_dir=local_dest, + ) + _sync_remote_dir( + host=args.host, + port=args.port, + user=args.user, + identity_file=args.identity_file, + remote_dir=args.remote_launch_dir, + local_dir=local_launch_dir, + ) + return 0 + time.sleep(max(5, args.interval_sec)) + + +def _sync_remote_dir( + *, + host: str, + port: int, + user: str, + identity_file: str | None, + remote_dir: str, + local_dir: Path, +) -> None: + rsync = shutil.which("rsync") + if rsync: + cmd = [ + rsync, + "-az", + "--delete", + "-e", + _ssh_command(port=port, identity_file=identity_file), + f"{user}@{host}:{remote_dir.rstrip('/')}/", + str(local_dir), + ] + subprocess.run(cmd, check=False, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + return + cmd = _ssh_base_args(port=port, identity_file=identity_file) + [ + f"{user}@{host}", + f"bash -lc {shlex_quote(f'cd {remote_dir} && tar -cf - .')}", + ] + proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=False) + if proc.returncode != 0: + return + extract = subprocess.run( + ["tar", "-xf", "-", "-C", str(local_dir)], + input=proc.stdout, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=False, + ) + if extract.returncode != 0: + return + + +def _remote_file_exists(*, host: str, port: int, user: str, identity_file: str | None, remote_path: str) -> bool: + cmd = _ssh_base_args(port=port, identity_file=identity_file) + [ + f"{user}@{host}", + f"bash -lc {shlex_quote(f'test -f {remote_path}')}", + ] + return subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=False).returncode == 0 + + +def _ssh_command(*, port: int, identity_file: str | None) -> str: + parts = [ + "ssh", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-p", + str(port), + ] + if identity_file: + parts.extend(["-i", identity_file]) + return " ".join(parts) + + +def _ssh_base_args(*, 
port: int, identity_file: str | None) -> list[str]: + args = [ + "ssh", + "-o", + "BatchMode=yes", + "-o", + "StrictHostKeyChecking=no", + "-o", + "UserKnownHostsFile=/dev/null", + "-p", + str(port), + ] + if identity_file: + args.extend(["-i", identity_file]) + return args + + +def shlex_quote(value: str) -> str: + return "'" + value.replace("'", "'\"'\"'") + "'" + + +if __name__ == "__main__": + raise SystemExit(main(sys.argv[1:])) diff --git a/ttt_autoresearch/prompt_builder.py b/ttt_autoresearch/prompt_builder.py index 692bca63..f871a14e 100644 --- a/ttt_autoresearch/prompt_builder.py +++ b/ttt_autoresearch/prompt_builder.py @@ -1,5 +1,25 @@ from __future__ import annotations +from typing import Any + + +CONSTRUCTION_SECTION = ( + "You may want to start your search from the current training script shown above.\n" + "This is the current starting point selected by the search procedure.\n" + "Make one focused experimental change at a time and preserve a working script.\n" + "You are encouraged to explore meaningfully different directions if the current approach appears saturated." +) + +CODE_SECTION = ( + "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" + "Prefer small, local hill-climbing edits over broad rewrites.\n" + "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" + "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" + "Do not refactor unrelated code.\n" + "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." 
+) + + def build_rollout_prompt( *, state_ctx: str, @@ -36,22 +56,51 @@ def build_rollout_prompt( - The tokenizer / vocabulary setup is fixed at vocab size `8192` - The training script must remain compatible with the existing BOS-aligned bin-packing data pipeline - The model implementation must continue to support `forward(x, y, reduction='none')` +- Keep `TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0` +- Preserve the final summary prints, especially the line that starts with `val_bpb:` ## Rules - You may only edit `train.py` - Do not modify `prepare.py`, dependencies, or the evaluation harness -- Return exactly one ```json``` block with this schema: -{{ - "summary": "short description of the change", - "rationale": "why this should improve val_bpb", - "train_py": "the full replacement contents of train.py" -}} -- `train_py` must be the full file, not a diff +- Return only one or more exact SEARCH/REPLACE edit blocks for `train.py` +- Prefer 1-3 small patch blocks +- Each SEARCH block must copy exact contiguous text from the current `train.py` +- If you change constants or a small code region, include enough surrounding context in SEARCH to make the patch unique +- Treat each SEARCH block like an exact `old_string` tool argument: it must match exactly once +- Do not return the full file +- Do not return standalone code fragments +- Do not wrap the answer in JSON +- Do not wrap the answer in markdown code fences +- Do not include any commentary, rationale, summary, or prose before or after the patch +- Do not abbreviate with `...` or placeholders; each replacement must be fully expanded source code +- Each patch block must use exactly this format: +<<<<<<< SEARCH +[exact existing text from the current train.py] +======= +[new replacement text] +>>>>>>> REPLACE +- The SEARCH text must match the current starting `train.py` exactly - Propose exactly one candidate for this rollout - Optimize for the lowest `val_bpb` under the fixed time budget - Prefer 
simpler changes when improvement is similar +## Example Response +<<<<<<< SEARCH +TOTAL_BATCH_SIZE = 524288 +======= +TOTAL_BATCH_SIZE = 393216 +>>>>>>> REPLACE + {state_ctx} {construction_section} {code_section} """ + + +def build_prompt_for_state(state: Any, target: float) -> str: + state_ctx = state.to_prompt(target, metric_name="val_bpb", maximize=False, language="python") + return build_rollout_prompt( + state_ctx=state_ctx, + construction_section=CONSTRUCTION_SECTION, + code_section=CODE_SECTION, + ) diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py index 276a8514..978401c6 100644 --- a/ttt_autoresearch/reward.py +++ b/ttt_autoresearch/reward.py @@ -7,7 +7,14 @@ from ttt_autoresearch.config import BootstrapContext from ttt_autoresearch.discover_compat import BaseRewardEvaluator -from ttt_autoresearch.runner import AutoResearchRunner, PatchCandidate, RunResult, parse_patch_candidate +from ttt_autoresearch.prompt_builder import build_prompt_for_state +from ttt_autoresearch.runner import ( + AutoResearchRunner, + PatchCandidate, + PreflightResult, + RunResult, + parse_patch_candidate_for_state, +) _ARTIFACT_LOCK = threading.Lock() @@ -63,17 +70,36 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: def get_reward(self, code: str, state: Any) -> dict[str, Any]: if self.bootstrap is None or self.runner is None: raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + prompt = self._prompt_for_state(state) + step = getattr(state, "timestep", -1) + 1 + state_id = getattr(state, "id", "unknown") try: - candidate = parse_patch_candidate(code) + candidate = parse_patch_candidate_for_state(code, state.current_train_py) except ValueError as exc: return self._persist_invalid_candidate( code=code, + prompt=prompt, state=state, error_message=f"Invalid candidate payload: {exc}", ) - result = self._run_candidate(candidate, state) + workspace = self.runner.prepare_candidate_workspace(candidate, step=step) + (workspace / 
"prompt.txt").write_text(prompt, encoding="utf-8") + (workspace / "response.txt").write_text(code, encoding="utf-8") + preflight = self.runner.preflight_candidate(workspace, candidate) + self.runner.write_json_artifact(workspace / "preflight.json", preflight.to_dict()) + if not preflight.ok: + return self._persist_preflight_failed_candidate( + candidate=candidate, + code=code, + prompt=prompt, + state=state, + workspace=workspace, + preflight=preflight, + ) + + result = self._run_candidate(candidate, state, workspace) current_best = self._current_best_from_state(state) reward, correctness = reward_for_result(result) improved_global_best = False @@ -87,11 +113,14 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: rationale=candidate.rationale, ) history_entry = { - "step": getattr(state, "timestep", -1) + 1, - "state_id": getattr(state, "id", "unknown"), + "step": step, + "state_id": state_id, "status": result.status, "summary": candidate.summary, "rationale": candidate.rationale, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, "reward": reward, "accepted": bool(result.status == "success" and result.val_bpb is not None), "val_bpb": result.val_bpb, @@ -100,26 +129,41 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "stderr_path": str(result.stderr_path), "workspace_path": str(result.workspace_path), "improved_global_best": improved_global_best, + "prompt_path": str(result.workspace_path / "prompt.txt"), + "preflight_path": str(result.workspace_path / "preflight.json"), + "failure_stage": "runtime" if result.status != "success" else "", + "failure_reason": result.status if result.status != "success" else "", } self.runner.append_history(history_entry) self.runner.write_rollout_manifest( result.workspace_path, { - "step": getattr(state, "timestep", -1) + 1, + "step": step, "starting_state": state.to_dict() if hasattr(state, "to_dict") else { 
- "id": getattr(state, "id", "unknown"), + "id": state_id, "timestep": getattr(state, "timestep", -1), }, "candidate": { "summary": candidate.summary, "rationale": candidate.rationale, "train_py": candidate.train_py, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, }, + "prompt": prompt, + "prompt_path": str(result.workspace_path / "prompt.txt"), + "raw_response": code, + "raw_response_path": str(result.workspace_path / "response.txt"), + "preflight": preflight.to_dict(), + "preflight_path": str(result.workspace_path / "preflight.json"), "evaluation": result.to_dict(), "reward": reward, "correctness": correctness, "message": self._build_message(candidate, result, current_best, reward), "improved_global_best": improved_global_best, + "failure_stage": "runtime" if result.status != "success" else "", + "failure_reason": result.status if result.status != "success" else "", }, ) @@ -136,16 +180,21 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "metrics": { "candidate_summary": candidate.summary, "candidate_rationale": candidate.rationale, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, "candidate_status": result.status, "candidate_val_bpb": result.val_bpb, "workspace_path": str(result.workspace_path), "stdout_path": str(result.stdout_path), "stderr_path": str(result.stderr_path), "improved_global_best": improved_global_best, + "prompt": prompt, + "preflight": preflight.to_dict(), }, } - def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) -> dict[str, Any]: + def _persist_invalid_candidate(self, code: str, prompt: str, state: Any, error_message: str) -> dict[str, Any]: if self.runner is None: raise RuntimeError("AutoResearchRewardEvaluator is not configured.") step = getattr(state, "timestep", -1) + 1 @@ -155,12 +204,26 @@ def 
_persist_invalid_candidate(self, code: str, state: Any, error_message: str) workspace = self.runner.create_candidate_artifact_dir(step=step, prefix="invalid") response_path = workspace / "response.txt" response_path.write_text(code, encoding="utf-8") + prompt_path = workspace / "prompt.txt" + prompt_path.write_text(prompt, encoding="utf-8") + preflight_path = workspace / "preflight.json" + self.runner.write_json_artifact( + preflight_path, + { + "ok": False, + "stage": "invalid_format", + "reason": error_message, + "details": {}, + }, + ) metrics_path = workspace / "metrics.json" self.runner.write_json_artifact( metrics_path, { "candidate_status": "invalid_candidate", "error": error_message, + "prompt_path": str(prompt_path), + "candidate_format": "invalid_format", }, ) history_entry = { @@ -169,6 +232,9 @@ def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) "status": "invalid_candidate", "summary": "", "rationale": "", + "candidate_format": "invalid_format", + "patch_block_count": 0, + "lines_changed": 0, "reward": _FAIL_REWARD, "accepted": False, "val_bpb": None, @@ -178,6 +244,10 @@ def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) "workspace_path": str(workspace), "improved_global_best": False, "error": error_message, + "prompt_path": str(prompt_path), + "preflight_path": str(preflight_path), + "failure_stage": "invalid_format", + "failure_reason": error_message, } self.runner.append_history(history_entry) self.runner.write_rollout_manifest( @@ -189,8 +259,20 @@ def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) "timestep": getattr(state, "timestep", -1), }, "candidate": None, + "candidate_format": "invalid_format", + "patch_block_count": 0, + "lines_changed": 0, + "prompt": prompt, + "prompt_path": str(prompt_path), "raw_response_path": str(response_path), "raw_response": code, + "preflight": { + "ok": False, + "stage": "invalid_format", + "reason": error_message, + 
"details": {}, + }, + "preflight_path": str(preflight_path), "evaluation": { "status": "invalid_candidate", "workspace_path": str(workspace), @@ -200,6 +282,8 @@ def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) "correctness": 0.0, "message": error_message, "improved_global_best": False, + "failure_stage": "invalid_format", + "failure_reason": error_message, }, ) return self._failure_payload( @@ -209,7 +293,101 @@ def _persist_invalid_candidate(self, code: str, state: Any, error_message: str) status="invalid_candidate", ) - def _run_candidate(self, candidate: PatchCandidate, state: Any) -> RunResult: + def _persist_preflight_failed_candidate( + self, + *, + candidate: PatchCandidate, + code: str, + prompt: str, + state: Any, + workspace: Path, + preflight: PreflightResult, + ) -> dict[str, Any]: + if self.runner is None: + raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + step = getattr(state, "timestep", -1) + 1 + state_id = getattr(state, "id", "unknown") + current_best = self._current_best_from_state(state) + metrics_path = workspace / "metrics.json" + with _ARTIFACT_LOCK: + self.runner.write_json_artifact( + metrics_path, + { + "candidate_status": "preflight_failed", + "error": preflight.reason, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, + "preflight_path": str(workspace / "preflight.json"), + }, + ) + history_entry = { + "step": step, + "state_id": state_id, + "status": "preflight_failed", + "summary": candidate.summary, + "rationale": candidate.rationale, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, + "reward": _FAIL_REWARD, + "accepted": False, + "val_bpb": None, + "parent_val_bpb": current_best, + "stdout_path": "", + "stderr_path": "", + "workspace_path": str(workspace), + "improved_global_best": False, + 
"error": preflight.reason, + "prompt_path": str(workspace / "prompt.txt"), + "preflight_path": str(workspace / "preflight.json"), + "failure_stage": preflight.stage, + "failure_reason": preflight.reason, + } + self.runner.append_history(history_entry) + self.runner.write_rollout_manifest( + workspace, + { + "step": step, + "starting_state": state.to_dict() if hasattr(state, "to_dict") else { + "id": state_id, + "timestep": getattr(state, "timestep", -1), + }, + "candidate": { + "summary": candidate.summary, + "rationale": candidate.rationale, + "train_py": candidate.train_py, + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, + }, + "prompt": prompt, + "prompt_path": str(workspace / "prompt.txt"), + "raw_response": code, + "raw_response_path": str(workspace / "response.txt"), + "preflight": preflight.to_dict(), + "preflight_path": str(workspace / "preflight.json"), + "evaluation": { + "status": "preflight_failed", + "workspace_path": str(workspace), + "metrics_path": str(metrics_path), + }, + "reward": _FAIL_REWARD, + "correctness": 0.0, + "message": preflight.reason, + "improved_global_best": False, + "failure_stage": preflight.stage, + "failure_reason": preflight.reason, + }, + ) + return self._failure_payload( + reward=_FAIL_REWARD, + raw_score=_FAIL_RAW_SCORE, + msg=preflight.reason, + status="preflight_failed", + ) + + def _run_candidate(self, candidate: PatchCandidate, state: Any, workspace: Path) -> RunResult: if self.bootstrap is None or self.runner is None: raise RuntimeError("AutoResearchRewardEvaluator is not configured.") if _EVALUATION_SLOTS is None: @@ -224,7 +402,7 @@ def _run_candidate(self, candidate: PatchCandidate, state: Any) -> RunResult: gpu_device = _GPU_DEVICE_QUEUE.get() return self.runner.run_candidate( bootstrap=self.bootstrap, - candidate=candidate, + workspace=workspace, step=getattr(state, "timestep", -1) + 1, state_id=getattr(state, "id", 
"unknown"), gpu_device=gpu_device, @@ -243,6 +421,14 @@ def _build_message(candidate: PatchCandidate, result: RunResult, current_best: f f"candidate_val_bpb={val_bpb} reward={reward:.6f}" ) + def _prompt_for_state(self, state: Any) -> str: + if self.bootstrap is None: + raise RuntimeError("AutoResearchRewardEvaluator is not configured.") + target = self.bootstrap.config.target_val_bpb + if target is None: + target = self._current_best_from_state(state) + return build_prompt_for_state(state, target) + @staticmethod def _current_best_from_state(state: Any) -> float: current_best = getattr(state, "current_best_val_bpb", None) diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index 8c8e3436..e415826b 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -1,6 +1,9 @@ from __future__ import annotations +import ast +import builtins from dataclasses import asdict, dataclass +import difflib from pathlib import Path import json import os @@ -13,11 +16,20 @@ from typing import Any from ttt_autoresearch.config import BootstrapContext, TTTAutoResearchConfig +from ttt_autoresearch.hyperbolic import HyperbolicPool from ttt_autoresearch.runpod import RunPodPool VAL_BPB_RE = re.compile(r"^val_bpb:\s*([-+]?(?:\d+\.?\d*|\.\d+)(?:[eE][-+]?\d+)?)", re.MULTILINE) -ALLOWED_CANDIDATE_KEYS = {"summary", "rationale", "train_py"} +SEARCH_REPLACE_BLOCK_RE = re.compile( + r"<<<<<<< SEARCH\n(.*?)\n=======\n(.*?)\n>>>>>>> REPLACE", + re.DOTALL, +) +VAL_BPB_PRINT_RE = re.compile(r"print\(\s*f?[\"']val_bpb:\s*", re.MULTILINE) +FORWARD_WITH_REDUCTION_RE = re.compile(r"def\s+forward\s*\([^)]*\breduction\s*=", re.MULTILINE) +_KNOWN_PREPARE_CONSTANTS = {"MAX_SEQ_LEN": 2048} +MAX_PATCH_BLOCKS = 3 +MAX_LINES_CHANGED = 160 @dataclass(slots=True) @@ -25,6 +37,25 @@ class PatchCandidate: summary: str rationale: str train_py: str + candidate_format: str + patch_block_count: int + lines_changed: int + + +@dataclass(slots=True) +class PreflightResult: + ok: bool + 
stage: str + reason: str + details: dict[str, Any] + + def to_dict(self) -> dict[str, Any]: + return { + "ok": self.ok, + "stage": self.stage, + "reason": self.reason, + "details": self.details, + } @dataclass(slots=True) @@ -49,26 +80,64 @@ def to_dict(self) -> dict[str, Any]: def parse_patch_candidate(candidate_json: str) -> PatchCandidate: - try: - payload = json.loads(candidate_json) - except json.JSONDecodeError as exc: - raise ValueError(f"Candidate must be valid JSON: {exc}") from exc - if not isinstance(payload, dict): - raise ValueError("Candidate payload must be a JSON object.") - unknown_keys = set(payload) - ALLOWED_CANDIDATE_KEYS - if unknown_keys: - raise ValueError(f"Candidate may only contain {sorted(ALLOWED_CANDIDATE_KEYS)}. Found {sorted(unknown_keys)}.") - missing = [key for key in ("summary", "rationale", "train_py") if key not in payload] - if missing: - raise ValueError(f"Candidate is missing required keys: {missing}.") - summary = payload["summary"] - rationale = payload["rationale"] - train_py = payload["train_py"] - if not all(isinstance(value, str) for value in (summary, rationale, train_py)): - raise ValueError("Candidate fields summary, rationale, and train_py must all be strings.") - if not train_py.strip(): - raise ValueError("train_py must contain the full replacement file.") - return PatchCandidate(summary=summary.strip(), rationale=rationale.strip(), train_py=train_py) + return parse_patch_candidate_for_state(candidate_json, "") + + +def parse_patch_candidate_for_state(candidate_json: str, current_train_py: str) -> PatchCandidate: + stripped = candidate_json.strip() + if not stripped: + raise ValueError("Candidate must not be empty.") + + updated_train_py, patch_block_count, extracted = apply_search_replace_patch(stripped, current_train_py) + lines_changed = count_lines_changed(current_train_py, updated_train_py) + if lines_changed == 0: + raise ValueError("Patch did not change train.py.") + return PatchCandidate( + 
summary="search_replace_patch_candidate", + rationale="model returned search/replace patch", + train_py=updated_train_py, + candidate_format="search_replace_patch_extracted" if extracted else "search_replace_patch", + patch_block_count=patch_block_count, + lines_changed=lines_changed, + ) + + +def apply_search_replace_patch(patch_text: str, current_train_py: str) -> tuple[str, int, bool]: + blocks = list(SEARCH_REPLACE_BLOCK_RE.finditer(patch_text)) + if not blocks: + raise ValueError("Candidate must contain one or more SEARCH/REPLACE patch blocks.") + if len(blocks) > MAX_PATCH_BLOCKS: + raise ValueError(f"Candidate must contain at most {MAX_PATCH_BLOCKS} SEARCH/REPLACE blocks.") + + updated = current_train_py + for match in blocks: + search_text = match.group(1) + replace_text = match.group(2) + if not search_text: + raise ValueError("SEARCH block must not be empty.") + occurrences = updated.count(search_text) + if occurrences == 0: + raise ValueError("SEARCH block did not match the current train.py.") + if occurrences > 1: + raise ValueError("SEARCH block matched multiple locations. 
Make the patch more specific.") + updated = updated.replace(search_text, replace_text, 1) + + extracted = _has_non_block_wrapper_text(patch_text, blocks) + return updated, len(blocks), extracted + + +def count_lines_changed(previous_text: str, updated_text: str) -> int: + changed = 0 + for line in difflib.unified_diff( + previous_text.splitlines(), + updated_text.splitlines(), + lineterm="", + ): + if line.startswith(("---", "+++", "@@")): + continue + if line.startswith(("+", "-")): + changed += 1 + return changed def parse_val_bpb(stdout: str) -> float | None: @@ -83,6 +152,7 @@ def __init__(self, repo_root: Path, config: TTTAutoResearchConfig, run_dir: Path self.repo_root = repo_root self.config = config self.run_dir = run_dir + self._hyperbolic_pool: HyperbolicPool | None = None self._runpod_pool: RunPodPool | None = None self.run_dir.mkdir(parents=True, exist_ok=True) (self.run_dir / "baseline").mkdir(exist_ok=True) @@ -137,18 +207,111 @@ def run_baseline(self, bootstrap: BootstrapContext | None = None) -> RunResult: self._write_json(self.run_dir / "baseline.json", result.to_dict()) return result + def prepare_candidate_workspace( + self, + candidate: PatchCandidate, + step: int, + *, + prefix: str = "candidate", + ) -> Path: + workspace = self.run_dir / "candidates" / f"{step:04d}_{prefix}_{uuid.uuid4().hex[:8]}" + self._copy_repo(workspace) + (workspace / "train.py").write_text(candidate.train_py, encoding="utf-8") + (workspace / "applied_train.py").write_text(candidate.train_py, encoding="utf-8") + return workspace + + def preflight_candidate(self, workspace: Path, candidate: PatchCandidate) -> PreflightResult: + train_path = workspace / "train.py" + source = train_path.read_text(encoding="utf-8") + if candidate.patch_block_count > MAX_PATCH_BLOCKS: + return PreflightResult( + ok=False, + stage="edit_scope", + reason=f"Candidate used {candidate.patch_block_count} patch blocks; limit is {MAX_PATCH_BLOCKS}.", + details={"patch_block_count": 
candidate.patch_block_count, "max_patch_blocks": MAX_PATCH_BLOCKS}, + ) + if candidate.lines_changed > MAX_LINES_CHANGED: + return PreflightResult( + ok=False, + stage="edit_scope", + reason=f"Candidate changed {candidate.lines_changed} lines; limit is {MAX_LINES_CHANGED}.", + details={"lines_changed": candidate.lines_changed, "max_lines_changed": MAX_LINES_CHANGED}, + ) + try: + module = ast.parse(source, filename=str(train_path)) + except SyntaxError as exc: + return PreflightResult( + ok=False, + stage="syntax", + reason="train.py does not parse as Python", + details={ + "lineno": exc.lineno, + "offset": exc.offset, + "text": exc.text, + "message": exc.msg, + }, + ) + + undefined_name = _find_top_level_undefined_name(module) + if undefined_name is not None: + return PreflightResult( + ok=False, + stage="top_level_names", + reason=f"Top-level code references undefined name {undefined_name!r}.", + details={"name": undefined_name}, + ) + + if not VAL_BPB_PRINT_RE.search(source): + return PreflightResult( + ok=False, + stage="summary_output", + reason="train.py no longer prints a final val_bpb summary line.", + details={"required_pattern": "print(... val_bpb: ...)"}, + ) + + if not FORWARD_WITH_REDUCTION_RE.search(source): + return PreflightResult( + ok=False, + stage="forward_signature", + reason="train.py no longer defines a forward(...) with a reduction parameter.", + details={"required_pattern": "def forward(... 
reduction=...)"}, + ) + + divisibility = _check_batch_divisibility(module) + if not divisibility.ok: + return divisibility + + compiled = subprocess.run( + [sys.executable, "-m", "py_compile", str(train_path)], + cwd=workspace, + text=True, + capture_output=True, + check=False, + ) + if compiled.returncode != 0: + return PreflightResult( + ok=False, + stage="py_compile", + reason="python -m py_compile failed.", + details={"stdout": compiled.stdout, "stderr": compiled.stderr}, + ) + + return PreflightResult( + ok=True, + stage="ok", + reason="Preflight checks passed.", + details=divisibility.details, + ) + def run_candidate( self, bootstrap: BootstrapContext, - candidate: PatchCandidate, + workspace: Path, step: int, state_id: str, gpu_device: str | None = None, ) -> RunResult: - workspace = self.run_dir / "candidates" / f"{step:04d}_{uuid.uuid4().hex[:8]}" - self._copy_repo(workspace) - (workspace / "train.py").write_text(candidate.train_py, encoding="utf-8") - result = self._execute_workspace( + return self._execute_workspace( workspace=workspace, command_template=self.config.candidate_command_override, bootstrap=bootstrap, @@ -156,7 +319,6 @@ def run_candidate( state_id=state_id, gpu_device=gpu_device, ) - return result def create_candidate_artifact_dir(self, step: int, prefix: str = "candidate") -> Path: label = prefix.replace(" ", "_") @@ -215,6 +377,9 @@ def write_json_artifact(self, path: Path, payload: dict[str, Any]) -> Path: return path def close(self) -> None: + if self._hyperbolic_pool is not None: + self._hyperbolic_pool.close() + self._hyperbolic_pool = None if self._runpod_pool is not None: self._runpod_pool.close() self._runpod_pool = None @@ -239,7 +404,7 @@ def _execute_workspace( ) -> RunResult: command = self._resolve_command(command_template, workspace, bootstrap, label, state_id) env = bootstrap.subprocess_env() if bootstrap else dict(os.environ) - if gpu_device is not None and self.config.execution_backend == "local": + if gpu_device is not 
None and self.config.execution_backend in {"local", "hyperbolic"}: env["CUDA_VISIBLE_DEVICES"] = gpu_device stdout_path = workspace / "stdout.log" stderr_path = workspace / "stderr.log" @@ -263,6 +428,24 @@ def _execute_workspace( returncode = None else: status = "success" if returncode == 0 else "crash" + elif self.config.execution_backend == "hyperbolic": + pool = self._get_hyperbolic_pool() + remote_result = pool.execute_workspace( + workspace=workspace, + command=command, + env=env, + timeout_sec=self.config.timeout_sec, + label=label, + ) + stdout = remote_result.stdout + stderr = remote_result.stderr + returncode = remote_result.returncode + elapsed_sec = remote_result.elapsed_sec + if returncode == 124: + status = "timeout" + returncode = None + else: + status = "success" if returncode == 0 else "crash" else: try: proc = subprocess.run( @@ -314,6 +497,11 @@ def _get_runpod_pool(self) -> RunPodPool: self._runpod_pool = RunPodPool(repo_root=self.repo_root, run_dir=self.run_dir, config=self.config) return self._runpod_pool + def _get_hyperbolic_pool(self) -> HyperbolicPool: + if self._hyperbolic_pool is None: + self._hyperbolic_pool = HyperbolicPool(repo_root=self.repo_root, run_dir=self.run_dir, config=self.config) + return self._hyperbolic_pool + def _read_val_bpb(self, stdout: str, metrics_path: Path) -> float | None: direct = parse_val_bpb(stdout) if direct is not None: @@ -371,3 +559,212 @@ def _load_baseline_train_py(self) -> str: if candidate.exists(): return candidate.read_text(encoding="utf-8") raise FileNotFoundError("Could not locate baseline train.py in either the run directory or repo root.") + + +def _check_batch_divisibility(module: ast.Module) -> PreflightResult: + env: dict[str, int] = dict(_KNOWN_PREPARE_CONSTANTS) + tracked = {"TOTAL_BATCH_SIZE", "DEVICE_BATCH_SIZE", "MAX_SEQ_LEN", "tokens_per_fwdbwd"} + + for stmt in module.body: + if not isinstance(stmt, ast.Assign) or len(stmt.targets) != 1: + continue + target = stmt.targets[0] + if not 
isinstance(target, ast.Name) or target.id not in tracked: + continue + value = _safe_eval_int_expr(stmt.value, env) + if value is not None: + env[target.id] = value + + missing = [name for name in ("TOTAL_BATCH_SIZE", "DEVICE_BATCH_SIZE", "MAX_SEQ_LEN") if name not in env] + if missing: + return PreflightResult( + ok=False, + stage="batch_divisibility", + reason=f"Could not statically resolve required batch constants: {missing}.", + details={"resolved": env}, + ) + + tokens_per_fwdbwd = env.get("tokens_per_fwdbwd", env["DEVICE_BATCH_SIZE"] * env["MAX_SEQ_LEN"]) + if tokens_per_fwdbwd <= 0: + return PreflightResult( + ok=False, + stage="batch_divisibility", + reason="tokens_per_fwdbwd must be positive.", + details={"resolved": env}, + ) + if env["TOTAL_BATCH_SIZE"] % tokens_per_fwdbwd != 0: + return PreflightResult( + ok=False, + stage="batch_divisibility", + reason="TOTAL_BATCH_SIZE is not divisible by DEVICE_BATCH_SIZE * MAX_SEQ_LEN.", + details={ + "TOTAL_BATCH_SIZE": env["TOTAL_BATCH_SIZE"], + "DEVICE_BATCH_SIZE": env["DEVICE_BATCH_SIZE"], + "MAX_SEQ_LEN": env["MAX_SEQ_LEN"], + "tokens_per_fwdbwd": tokens_per_fwdbwd, + }, + ) + + return PreflightResult( + ok=True, + stage="batch_divisibility", + reason="Batch-size divisibility check passed.", + details={ + "TOTAL_BATCH_SIZE": env["TOTAL_BATCH_SIZE"], + "DEVICE_BATCH_SIZE": env["DEVICE_BATCH_SIZE"], + "MAX_SEQ_LEN": env["MAX_SEQ_LEN"], + "tokens_per_fwdbwd": tokens_per_fwdbwd, + "grad_accum_steps": env["TOTAL_BATCH_SIZE"] // tokens_per_fwdbwd, + }, + ) + + +def _safe_eval_int_expr(node: ast.AST, env: dict[str, int]) -> int | None: + if isinstance(node, ast.Constant) and isinstance(node.value, int): + return int(node.value) + if isinstance(node, ast.Name): + return env.get(node.id) + if isinstance(node, ast.UnaryOp) and isinstance(node.op, (ast.UAdd, ast.USub)): + operand = _safe_eval_int_expr(node.operand, env) + if operand is None: + return None + return operand if isinstance(node.op, ast.UAdd) else -operand + 
if isinstance(node, ast.BinOp): + left = _safe_eval_int_expr(node.left, env) + right = _safe_eval_int_expr(node.right, env) + if left is None or right is None: + return None + if isinstance(node.op, ast.Add): + return left + right + if isinstance(node.op, ast.Sub): + return left - right + if isinstance(node.op, ast.Mult): + return left * right + if isinstance(node.op, ast.FloorDiv): + return left // right if right != 0 else None + if isinstance(node.op, ast.Div): + return left // right if right != 0 and left % right == 0 else None + if isinstance(node.op, ast.Mod): + return left % right if right != 0 else None + if isinstance(node.op, ast.Pow): + return left ** right + return None + + +def _find_top_level_undefined_name(module: ast.Module) -> str | None: + defined = set(dir(builtins)) | { + "__name__", + "__file__", + "__package__", + "__spec__", + "__builtins__", + } + defined.update(_collect_defined_names(module.body)) + + for stmt in module.body: + for name in _top_level_loaded_names(stmt): + if name not in defined: + return name + return None + + +def _top_level_loaded_names(stmt: ast.stmt) -> set[str]: + loaded: set[str] = set() + + def visit(node: ast.AST) -> None: + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + for decorator in node.decorator_list: + visit(decorator) + for default in node.args.defaults: + visit(default) + for default in node.args.kw_defaults: + if default is not None: + visit(default) + if node.returns is not None: + visit(node.returns) + return + if isinstance(node, ast.ClassDef): + for decorator in node.decorator_list: + visit(decorator) + for base in node.bases: + visit(base) + for keyword in node.keywords: + visit(keyword.value) + return + if isinstance(node, (ast.Lambda, ast.ListComp, ast.SetComp, ast.DictComp, ast.GeneratorExp)): + return + if isinstance(node, ast.Name) and isinstance(node.ctx, ast.Load): + loaded.add(node.id) + for child in ast.iter_child_nodes(node): + visit(child) + + visit(stmt) + return loaded + 
+ +def _names_defined_by_stmt(stmt: ast.stmt) -> set[str]: + names: set[str] = set() + + def add_target(target: ast.AST) -> None: + if isinstance(target, ast.Name): + names.add(target.id) + return + if isinstance(target, (ast.Tuple, ast.List)): + for elt in target.elts: + add_target(elt) + + if isinstance(stmt, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)): + names.add(stmt.name) + elif isinstance(stmt, (ast.Assign, ast.AnnAssign, ast.AugAssign)): + targets = stmt.targets if isinstance(stmt, ast.Assign) else [stmt.target] + for target in targets: + add_target(target) + elif isinstance(stmt, (ast.For, ast.AsyncFor)): + add_target(stmt.target) + elif isinstance(stmt, ast.With): + for item in stmt.items: + if item.optional_vars is not None: + add_target(item.optional_vars) + elif isinstance(stmt, ast.Import): + for alias in stmt.names: + names.add(alias.asname or alias.name.split(".")[0]) + elif isinstance(stmt, ast.ImportFrom): + for alias in stmt.names: + names.add(alias.asname or alias.name) + return names + + +def _collect_defined_names(statements: list[ast.stmt]) -> set[str]: + names: set[str] = set() + for stmt in statements: + names.update(_names_defined_by_stmt(stmt)) + for child_block in _child_statement_blocks(stmt): + names.update(_collect_defined_names(child_block)) + return names + + +def _child_statement_blocks(stmt: ast.stmt) -> list[list[ast.stmt]]: + blocks: list[list[ast.stmt]] = [] + for attr in ("body", "orelse", "finalbody"): + value = getattr(stmt, attr, None) + if isinstance(value, list): + blocks.append(value) + handlers = getattr(stmt, "handlers", None) + if handlers: + for handler in handlers: + blocks.append(handler.body) + return blocks + + +def _has_non_block_wrapper_text(response_text: str, blocks: list[re.Match[str]]) -> bool: + pieces: list[str] = [] + cursor = 0 + for match in blocks: + pieces.append(response_text[cursor:match.start()]) + cursor = match.end() + pieces.append(response_text[cursor:]) + wrapper = 
"".join(pieces).strip() + if not wrapper: + return False + wrapper = wrapper.replace("```", "").strip() + return bool(wrapper) diff --git a/ttt_autoresearch/runpod.py b/ttt_autoresearch/runpod.py index fbba5f9a..32fe82bb 100644 --- a/ttt_autoresearch/runpod.py +++ b/ttt_autoresearch/runpod.py @@ -321,7 +321,10 @@ def _bootstrap_pod(self, pod: RunPodPod) -> None: "set -euo pipefail", f"rm -rf {shlex.quote(repo_root)}", f"mkdir -p {shlex.quote(repo_root)}", - f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)} --strip-components=1", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)}", + f"if [ ! -f {shlex.quote(repo_root)}/pyproject.toml ]; then rm -rf {shlex.quote(repo_root)}/* && tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)} --strip-components=1; fi", + f"test -f {shlex.quote(repo_root)}/pyproject.toml", + f"test -f {shlex.quote(repo_root)}/prepare.py", ] script_lines.extend(rendered) self._run_ssh( @@ -357,7 +360,9 @@ def _run_workspace_on_pod( "set -uo pipefail", f"rm -rf {shlex.quote(remote_workspace)}", f"mkdir -p {shlex.quote(remote_workspace)}", - f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)} --strip-components=1", + f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)}", + f"if [ ! 
-f {shlex.quote(remote_workspace)}/train.py ]; then rm -rf {shlex.quote(remote_workspace)}/* && tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(remote_workspace)} --strip-components=1; fi", + f"test -f {shlex.quote(remote_workspace)}/train.py", f"cd {shlex.quote(remote_workspace)}", *env_lines, f"timeout --kill-after=30s {timeout_sec}s {command_str} > stdout.log 2> stderr.log", @@ -453,7 +458,7 @@ def _download_text_file(self, pod: RunPodPod, remote_path: str) -> str: def _run_ssh(self, pod: RunPodPod, script: str, timeout: int, check: bool) -> subprocess.CompletedProcess[str]: try: completed = subprocess.run( - self._ssh_base_args(pod) + ["bash", "-lc", script], + self._ssh_base_args(pod) + [f"bash -lc {shlex.quote(script)}"], text=True, capture_output=True, timeout=timeout, @@ -555,6 +560,8 @@ def _should_skip(rel: Path) -> bool: return False if parts[0] in {".git", "runs", "__pycache__", ".pytest_cache", ".venv"}: return True + if rel.name in {"prompt.txt", "response.txt"}: + return True return rel.suffix in {".pyc", ".pyo"} def _write_pool_state(self) -> None: diff --git a/uv.lock b/uv.lock index 027f6a81..53d0e572 100644 --- a/uv.lock +++ b/uv.lock @@ -202,7 +202,8 @@ dependencies = [ { name = "requests" }, { name = "rustbpe" }, { name = "tiktoken" }, - { name = "torch" }, + { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'darwin'" }, + { name = "torch", version = "2.9.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "sys_platform != 'darwin'" }, { name = "ttt-discover" }, ] @@ -216,7 +217,8 @@ requires-dist = [ { name = "requests", specifier = ">=2.32.0" }, { name = "rustbpe", specifier = ">=0.1.0" }, { name = "tiktoken", specifier = ">=0.11.0" }, - { name = "torch", specifier = "==2.9.1", index = "https://download.pytorch.org/whl/cu128" }, + { name = "torch", marker = "sys_platform != 'darwin'", specifier = "==2.9.1", index = 
"https://download.pytorch.org/whl/cu128" }, + { name = "torch", marker = "sys_platform == 'darwin'", specifier = "==2.9.1" }, { name = "ttt-discover", git = "https://github.com/test-time-training/discover?rev=5df1a0ee9b04272ca33de0101ae64dd499e63f29" }, ] @@ -1508,7 +1510,7 @@ name = "nvidia-cudnn-cu12" version = "9.10.2.21" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/fa/41/e79269ce215c857c935fd86bcfe91a451a584dfc27f1e068f568b9ad1ab7/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:c9132cc3f8958447b4910a1720036d9eff5928cc3179b0a51fb6d167c6cc87d8", size = 705026878, upload-time = "2025-06-06T21:52:51.348Z" }, @@ -1520,7 +1522,7 @@ name = "nvidia-cufft-cu12" version = "11.3.3.83" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/60/bc/7771846d3a0272026c416fbb7e5f4c1f146d6d80704534d0b187dd6f4800/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:848ef7224d6305cdb2a4df928759dca7b1201874787083b6e7550dd6765ce69a", size = 193109211, upload-time = "2025-03-07T01:44:56.873Z" }, @@ -1550,9 +1552,9 @@ name = "nvidia-cusolver-cu12" version = "11.7.3.90" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12" }, - { name = "nvidia-cusparse-cu12" }, - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ { url = 
"https://files.pythonhosted.org/packages/c8/32/f7cd6ce8a7690544d084ea21c26e910a97e077c9b7f07bf5de623ee19981/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_aarch64.whl", hash = "sha256:db9ed69dbef9715071232caa9b69c52ac7de3a95773c2db65bdba85916e4e5c0", size = 267229841, upload-time = "2025-03-07T01:46:54.356Z" }, @@ -1564,7 +1566,7 @@ name = "nvidia-cusparse-cu12" version = "12.5.8.93" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/bc/f7/cd777c4109681367721b00a106f491e0d0d15cfa1fd59672ce580ce42a97/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b6c161cb130be1a07a27ea6923df8141f3c295852f4b260c65f18f3e0a091dc", size = 288117129, upload-time = "2025-03-07T01:47:40.407Z" }, @@ -2698,15 +2700,56 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" }, ] +[[package]] +name = "torch" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'darwin'", + "python_full_version < '3.12' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "filelock", marker = "sys_platform == 'darwin'" }, + { name = "fsspec", marker = "sys_platform == 'darwin'" }, + { name = "jinja2", marker = "sys_platform == 'darwin'" }, + { name = "networkx", marker = "sys_platform == 'darwin'" }, + { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform == 'darwin'" }, + { name = 
"sympy", marker = "sys_platform == 'darwin'" }, + { name = "typing-extensions", marker = "sys_platform == 'darwin'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/ce/7d251155a783fb2c1bb6837b2b7023c622a2070a0a72726ca1df47e7ea34/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475", size = 74463887, upload-time = "2025-11-12T15:20:36.611Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, + { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, + { url = "https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, + { url = "https://files.pythonhosted.org/packages/48/50/c4b5112546d0d13cc9eaa1c732b823d676a9f49ae8b6f97772f795874a03/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a", size = 74433245, upload-time = "2025-11-12T15:22:39.027Z" }, + { url = "https://files.pythonhosted.org/packages/bd/b2/2d15a52516b2ea3f414643b8de68fa4cb220d3877ac8b1028c83dc8ca1c4/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c", size = 74823558, upload-time = "2025-11-12T15:22:43.392Z" }, +] + 
[[package]] name = "torch" version = "2.9.1+cu128" source = { registry = "https://download.pytorch.org/whl/cu128" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform == 'linux'", + "python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform == 'linux'", + "python_full_version >= '3.12' and python_full_version < '3.14' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.12' and sys_platform == 'win32'", + "python_full_version < '3.12' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform != 'darwin' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", +] dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "jinja2" }, - { name = "networkx" }, + { name = "filelock", marker = "sys_platform != 'darwin'" }, + { name = "fsspec", marker = "sys_platform != 'darwin'" }, + { name = "jinja2", marker = "sys_platform != 'darwin'" }, + { name = "networkx", marker = "sys_platform != 'darwin'" }, { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, { name = "nvidia-cuda-cupti-cu12", marker = "sys_platform == 'linux'" }, { name = "nvidia-cuda-nvrtc-cu12", marker = "sys_platform == 'linux'" }, @@ -2722,10 +2765,10 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, { 
name = "nvidia-nvshmem-cu12", marker = "sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12'" }, - { name = "sympy" }, + { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform != 'darwin'" }, + { name = "sympy", marker = "sys_platform != 'darwin'" }, { name = "triton", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions" }, + { name = "typing-extensions", marker = "sys_platform != 'darwin'" }, ] wheels = [ { url = "https://download.pytorch.org/whl/cu128/torch-2.9.1%2Bcu128-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:cf4ad82430824a80a9f398e29369524ed26c152cf00c2c12002e5400b35e260d" }, @@ -2818,7 +2861,8 @@ dependencies = [ { name = "termcolor" }, { name = "tiktoken" }, { name = "tinker" }, - { name = "torch" }, + { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'darwin'" }, + { name = "torch", version = "2.9.1+cu128", source = { registry = "https://download.pytorch.org/whl/cu128" }, marker = "sys_platform != 'darwin'" }, { name = "tqdm" }, { name = "transformers" }, { name = "wandb" }, From ded60be2bb76bb747847c4ebe873a144a5822e36 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Wed, 11 Mar 2026 23:25:41 +1100 Subject: [PATCH 10/17] Relax rollout gating and harden remote execution --- configs/ttt_discover_autoresearch.yaml | 6 +- tests/test_hyperbolic.py | 37 +++++++-- tests/test_reward.py | 14 ++-- tests/test_runner.py | 45 +++++++++- ttt_autoresearch/hyperbolic.py | 49 +++++++---- ttt_autoresearch/reward.py | 110 ------------------------- ttt_autoresearch/runner.py | 27 ++---- 7 files changed, 125 insertions(+), 163 deletions(-) diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index 862825f8..fd28d46b 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ 
-34,11 +34,11 @@ gpu_devices: - "6" - "7" execution_backend: hyperbolic -hyperbolic_ssh_host: 85.234.79.188 +hyperbolic_ssh_host: 85.234.79.51 hyperbolic_ssh_port: 22 -hyperbolic_ssh_user: ubuntu +hyperbolic_ssh_user: user hyperbolic_ssh_private_key_path: null -hyperbolic_repo_root: /home/ubuntu/autoresearch +hyperbolic_repo_root: /home/user/autoresearch hyperbolic_prepare_num_shards: 10 hyperbolic_bootstrap_timeout_sec: 7200 hyperbolic_bootstrap_commands: null diff --git a/tests/test_hyperbolic.py b/tests/test_hyperbolic.py index c84ec9d9..3ede5a9c 100644 --- a/tests/test_hyperbolic.py +++ b/tests/test_hyperbolic.py @@ -140,14 +140,35 @@ def test_detached_launch_refuses_active_remote_runs(self) -> None: pool.config = config def fake_run_ssh(command: str, timeout: int, check: bool): - if "pgrep -af" in command: - return subprocess.CompletedProcess( - args=["ssh"], - returncode=12, - stdout="Detected active AutoResearch processes already running on the Hyperbolic node.\nControllers:\n123 python run_ttt_discover.py", - stderr="", - ) - raise AssertionError("unexpected remote command") + return subprocess.CompletedProcess( + args=["ssh"], + returncode=12, + stdout="Detected active AutoResearch processes already running on the Hyperbolic node.\nControllers:\n123 python run_ttt_discover.py", + stderr="", + ) + + pool._run_ssh = fake_run_ssh # type: ignore[method-assign] + with self.assertRaises(HyperbolicError): + pool._assert_no_active_remote_runs() + + def test_detached_launch_refuses_alt_train_command_shapes(self) -> None: + config = TTTAutoResearchConfig( + execution_backend="hyperbolic", + hyperbolic_ssh_host="1.2.3.4", + ).normalized(Path(".")) + pool = object.__new__(HyperbolicPool) + pool.config = config + + def fake_run_ssh(command: str, timeout: int, check: bool): + return subprocess.CompletedProcess( + args=["ssh"], + returncode=12, + stdout=( + "Detected active AutoResearch processes already running on the Hyperbolic node.\n" + "999 uv run python 
/home/ubuntu/autoresearch/train.py\n" + ), + stderr="", + ) pool._run_ssh = fake_run_ssh # type: ignore[method-assign] with self.assertRaises(HyperbolicError): diff --git a/tests/test_reward.py b/tests/test_reward.py index f8442849..c9a1381c 100644 --- a/tests/test_reward.py +++ b/tests/test_reward.py @@ -168,7 +168,7 @@ def test_parse_patch_candidate_accepts_search_replace_patch(self) -> None: self.assertEqual(candidate.summary, "search_replace_patch_candidate") self.assertEqual(candidate.train_py, "print(1)\n") - def test_preflight_failed_candidate_is_persisted_to_history_and_manifest(self) -> None: + def test_invalid_batch_patch_runs_and_crashes_at_runtime(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) original = ( @@ -207,16 +207,16 @@ def test_preflight_failed_candidate_is_persisted_to_history_and_manifest(self) - payload = "<<<<<<< SEARCH\nTOTAL_BATCH_SIZE = 8\n=======\nTOTAL_BATCH_SIZE = 7\n>>>>>>> REPLACE" result = evaluator.get_reward(payload, state) - self.assertEqual(result["metrics"]["candidate_status"], "preflight_failed") + self.assertEqual(result["metrics"]["candidate_status"], "crash") history_path = Path(config.run_dir) / "history.jsonl" history = json.loads(history_path.read_text(encoding="utf-8").strip()) - self.assertEqual(history["status"], "preflight_failed") - self.assertEqual(history["failure_stage"], "batch_divisibility") + self.assertEqual(history["status"], "crash") + self.assertEqual(history["failure_stage"], "runtime") manifest_path = next((Path(config.run_dir) / "candidates").glob("*/rollout_manifest.json")) manifest = json.loads(manifest_path.read_text(encoding="utf-8")) - self.assertEqual(manifest["evaluation"]["status"], "preflight_failed") - self.assertEqual(manifest["preflight"]["stage"], "batch_divisibility") - self.assertTrue((Path(manifest["preflight_path"])).exists()) + self.assertEqual(manifest["evaluation"]["status"], "crash") + self.assertIn("stdout_path", manifest["evaluation"]) + 
self.assertIn("stderr_path", manifest["evaluation"]) def test_concurrent_reward_calls_serialize_inner_evaluations(self) -> None: class FakeRunner: diff --git a/tests/test_runner.py b/tests/test_runner.py index ec511d3b..da8fbd91 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -1,5 +1,7 @@ from __future__ import annotations +import ast +import json from pathlib import Path import tempfile import unittest @@ -7,7 +9,12 @@ import os from ttt_autoresearch.config import TTTAutoResearchConfig -from ttt_autoresearch.runner import AutoResearchRunner, parse_patch_candidate_for_state, parse_val_bpb +from ttt_autoresearch.runner import ( + AutoResearchRunner, + _find_top_level_undefined_name, + parse_patch_candidate_for_state, + parse_val_bpb, +) class RunnerTests(unittest.TestCase): @@ -63,6 +70,34 @@ def test_runner_reads_metric_and_status(self) -> None: self.assertAlmostEqual(result.val_bpb, 1.25) self.assertTrue((Path(config.run_dir) / "baseline" / "train.py").exists()) + def test_runner_ignores_malformed_metrics_json_when_stdout_has_val_bpb(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text("print('ok')\n", encoding="utf-8") + fixture = ( + "from pathlib import Path\n" + "print('val_bpb: 0.876543')\n" + "Path('metrics.json').write_text('{not-json', encoding='utf-8')\n" + ) + fixtures = root / "tests" / "fixtures" + fixtures.mkdir(parents=True) + (fixtures / "bad_metrics.py").write_text(fixture, encoding="utf-8") + config = TTTAutoResearchConfig( + execution_backend="local", + timeout_sec=5, + baseline_command_override=[sys.executable, "tests/fixtures/bad_metrics.py"], + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.0) + result = runner.run_baseline(bootstrap=bootstrap) + 
self.assertEqual(result.status, "success") + self.assertAlmostEqual(result.val_bpb, 0.876543) + metrics = json.loads(result.metrics_path.read_text(encoding="utf-8")) + self.assertAlmostEqual(metrics["val_bpb"], 0.876543) + self.assertIn("metrics_json_error", metrics) + def test_preflight_rejects_invalid_batch_divisibility(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) @@ -91,6 +126,14 @@ def test_preflight_rejects_invalid_batch_divisibility(self) -> None: self.assertFalse(preflight.ok) self.assertEqual(preflight.stage, "batch_divisibility") + def test_top_level_undefined_name_does_not_treat_nested_bindings_as_module_scope(self) -> None: + module = ast.parse( + "if False:\n" + " x = 1\n" + "print(x)\n" + ) + self.assertEqual(_find_top_level_undefined_name(module), "x") + def test_preflight_rejects_missing_val_bpb_summary(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) diff --git a/ttt_autoresearch/hyperbolic.py b/ttt_autoresearch/hyperbolic.py index 9bc02f9d..fac1f53c 100644 --- a/ttt_autoresearch/hyperbolic.py +++ b/ttt_autoresearch/hyperbolic.py @@ -103,7 +103,7 @@ def launch_detached_controller(self) -> dict[str, str]: f" echo 'Controller already running at {remote_pid_path}'", " exit 1", "fi", - 'export PATH="$HOME/.local/bin:$PATH"', + f'export PATH="{self._remote_uv_bin_dir()}:$HOME/.local/bin:$PATH"', *[f"export {name}={shlex.quote(value)}" for name, value in sorted(forwarded_env.items())], f"cd {shlex.quote(self.config.hyperbolic_repo_root)}", "nohup bash -lc " @@ -155,23 +155,30 @@ def launch_detached_controller(self) -> dict[str, str]: def _assert_no_active_remote_runs(self) -> None: repo_root = self.config.hyperbolic_repo_root - controller_pattern = f"run_ttt_discover.py --config {repo_root}/runs/launches/" - train_pattern = f"{repo_root}/.venv/bin/python3 train.py" script = "\n".join( [ "set -euo pipefail", - f"controller_matches=$(pgrep -af {shlex.quote(controller_pattern)} || true)", - 
f"train_matches=$(pgrep -af {shlex.quote(train_pattern)} || true)", - 'if [ -n "$controller_matches" ] || [ -n "$train_matches" ]; then', + "matches=$(python3 - <<'PY'\n" + "import subprocess\n" + f"repo_root = {repo_root!r}\n" + "rows = subprocess.run(['ps', '-eo', 'pid=,args='], text=True, capture_output=True, check=True).stdout.splitlines()\n" + "hits = []\n" + "for row in rows:\n" + " row = row.strip()\n" + " if not row:\n" + " continue\n" + " pid, _, args = row.partition(' ')\n" + " if repo_root not in args:\n" + " continue\n" + " is_controller = 'run_ttt_discover.py' in args and f'{repo_root}/runs/launches/' in args\n" + " is_train = 'train.py' in args and 'run_ttt_discover.py' not in args\n" + " if is_controller or is_train:\n" + " hits.append(row)\n" + "print('\\n'.join(hits))\n" + "PY\n)", + 'if [ -n "$matches" ]; then', " echo 'Detected active AutoResearch processes already running on the Hyperbolic node.'", - " if [ -n \"$controller_matches\" ]; then", - " echo 'Controllers:'", - " echo \"$controller_matches\"", - " fi", - " if [ -n \"$train_matches\" ]; then", - " echo 'Train jobs:'", - " echo \"$train_matches\"", - " fi", + " echo \"$matches\"", " exit 12", "fi", ] @@ -243,7 +250,9 @@ def _bootstrap_node(self) -> None: self._upload_file(self.repo_archive_path, remote_archive) repo_root = self.config.hyperbolic_repo_root bootstrap_commands = self.config.hyperbolic_bootstrap_commands or [ - "python3 -m pip install --user --upgrade uv", + f'if ! command -v uv >/dev/null 2>&1 && [ ! 
-x "{self._remote_uv_bin_dir()}/uv" ]; then ' + "curl -LsSf https://astral.sh/uv/install.sh | sh; " + "fi", "cd {repo_root} && uv sync", "cd {repo_root} && uv run prepare.py --num-shards {prepare_num_shards}", ] @@ -256,7 +265,7 @@ def _bootstrap_node(self) -> None: ] script_lines = [ "set -euo pipefail", - 'export PATH="$HOME/.local/bin:$PATH"', + f'export PATH="{self._remote_uv_bin_dir()}:$HOME/.local/bin:$PATH"', f"rm -rf {shlex.quote(repo_root)}", f"mkdir -p {shlex.quote(repo_root)}", f"tar -xzf {shlex.quote(remote_archive)} -C {shlex.quote(repo_root)}", @@ -271,6 +280,14 @@ def _bootstrap_node(self) -> None: check=True, ) + @staticmethod + def _remote_uv_root() -> str: + return "$HOME/.local" + + @classmethod + def _remote_uv_bin_dir(cls) -> str: + return f"{cls._remote_uv_root()}/bin" + def _run_workspace_on_node( self, workspace: Path, diff --git a/ttt_autoresearch/reward.py b/ttt_autoresearch/reward.py index 978401c6..d9655a9a 100644 --- a/ttt_autoresearch/reward.py +++ b/ttt_autoresearch/reward.py @@ -11,7 +11,6 @@ from ttt_autoresearch.runner import ( AutoResearchRunner, PatchCandidate, - PreflightResult, RunResult, parse_patch_candidate_for_state, ) @@ -87,17 +86,6 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: workspace = self.runner.prepare_candidate_workspace(candidate, step=step) (workspace / "prompt.txt").write_text(prompt, encoding="utf-8") (workspace / "response.txt").write_text(code, encoding="utf-8") - preflight = self.runner.preflight_candidate(workspace, candidate) - self.runner.write_json_artifact(workspace / "preflight.json", preflight.to_dict()) - if not preflight.ok: - return self._persist_preflight_failed_candidate( - candidate=candidate, - code=code, - prompt=prompt, - state=state, - workspace=workspace, - preflight=preflight, - ) result = self._run_candidate(candidate, state, workspace) current_best = self._current_best_from_state(state) @@ -130,7 +118,6 @@ def get_reward(self, code: str, state: Any) -> dict[str, 
Any]: "workspace_path": str(result.workspace_path), "improved_global_best": improved_global_best, "prompt_path": str(result.workspace_path / "prompt.txt"), - "preflight_path": str(result.workspace_path / "preflight.json"), "failure_stage": "runtime" if result.status != "success" else "", "failure_reason": result.status if result.status != "success" else "", } @@ -155,8 +142,6 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "prompt_path": str(result.workspace_path / "prompt.txt"), "raw_response": code, "raw_response_path": str(result.workspace_path / "response.txt"), - "preflight": preflight.to_dict(), - "preflight_path": str(result.workspace_path / "preflight.json"), "evaluation": result.to_dict(), "reward": reward, "correctness": correctness, @@ -190,7 +175,6 @@ def get_reward(self, code: str, state: Any) -> dict[str, Any]: "stderr_path": str(result.stderr_path), "improved_global_best": improved_global_best, "prompt": prompt, - "preflight": preflight.to_dict(), }, } @@ -293,100 +277,6 @@ def _persist_invalid_candidate(self, code: str, prompt: str, state: Any, error_m status="invalid_candidate", ) - def _persist_preflight_failed_candidate( - self, - *, - candidate: PatchCandidate, - code: str, - prompt: str, - state: Any, - workspace: Path, - preflight: PreflightResult, - ) -> dict[str, Any]: - if self.runner is None: - raise RuntimeError("AutoResearchRewardEvaluator is not configured.") - step = getattr(state, "timestep", -1) + 1 - state_id = getattr(state, "id", "unknown") - current_best = self._current_best_from_state(state) - metrics_path = workspace / "metrics.json" - with _ARTIFACT_LOCK: - self.runner.write_json_artifact( - metrics_path, - { - "candidate_status": "preflight_failed", - "error": preflight.reason, - "candidate_format": candidate.candidate_format, - "patch_block_count": candidate.patch_block_count, - "lines_changed": candidate.lines_changed, - "preflight_path": str(workspace / "preflight.json"), - }, - ) - history_entry = { - 
"step": step, - "state_id": state_id, - "status": "preflight_failed", - "summary": candidate.summary, - "rationale": candidate.rationale, - "candidate_format": candidate.candidate_format, - "patch_block_count": candidate.patch_block_count, - "lines_changed": candidate.lines_changed, - "reward": _FAIL_REWARD, - "accepted": False, - "val_bpb": None, - "parent_val_bpb": current_best, - "stdout_path": "", - "stderr_path": "", - "workspace_path": str(workspace), - "improved_global_best": False, - "error": preflight.reason, - "prompt_path": str(workspace / "prompt.txt"), - "preflight_path": str(workspace / "preflight.json"), - "failure_stage": preflight.stage, - "failure_reason": preflight.reason, - } - self.runner.append_history(history_entry) - self.runner.write_rollout_manifest( - workspace, - { - "step": step, - "starting_state": state.to_dict() if hasattr(state, "to_dict") else { - "id": state_id, - "timestep": getattr(state, "timestep", -1), - }, - "candidate": { - "summary": candidate.summary, - "rationale": candidate.rationale, - "train_py": candidate.train_py, - "candidate_format": candidate.candidate_format, - "patch_block_count": candidate.patch_block_count, - "lines_changed": candidate.lines_changed, - }, - "prompt": prompt, - "prompt_path": str(workspace / "prompt.txt"), - "raw_response": code, - "raw_response_path": str(workspace / "response.txt"), - "preflight": preflight.to_dict(), - "preflight_path": str(workspace / "preflight.json"), - "evaluation": { - "status": "preflight_failed", - "workspace_path": str(workspace), - "metrics_path": str(metrics_path), - }, - "reward": _FAIL_REWARD, - "correctness": 0.0, - "message": preflight.reason, - "improved_global_best": False, - "failure_stage": preflight.stage, - "failure_reason": preflight.reason, - }, - ) - return self._failure_payload( - reward=_FAIL_REWARD, - raw_score=_FAIL_RAW_SCORE, - msg=preflight.reason, - status="preflight_failed", - ) - def _run_candidate(self, candidate: PatchCandidate, state: Any, 
workspace: Path) -> RunResult: if self.bootstrap is None or self.runner is None: raise RuntimeError("AutoResearchRewardEvaluator is not configured.") diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index e415826b..98df01dc 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -474,10 +474,16 @@ def _execute_workspace( if status == "success" and val_bpb is None: status = "missing_metric" + metrics = {"val_bpb": val_bpb} if val_bpb is not None and metrics_path.exists(): - metrics = json.loads(metrics_path.read_text(encoding="utf-8")) - else: - metrics = {"val_bpb": val_bpb} + try: + loaded_metrics = json.loads(metrics_path.read_text(encoding="utf-8")) + except json.JSONDecodeError as exc: + metrics["metrics_json_error"] = str(exc) + else: + if isinstance(loaded_metrics, dict): + metrics = loaded_metrics + metrics.setdefault("val_bpb", val_bpb) self._write_json(metrics_path, metrics) return RunResult( @@ -738,24 +744,9 @@ def _collect_defined_names(statements: list[ast.stmt]) -> set[str]: names: set[str] = set() for stmt in statements: names.update(_names_defined_by_stmt(stmt)) - for child_block in _child_statement_blocks(stmt): - names.update(_collect_defined_names(child_block)) return names -def _child_statement_blocks(stmt: ast.stmt) -> list[list[ast.stmt]]: - blocks: list[list[ast.stmt]] = [] - for attr in ("body", "orelse", "finalbody"): - value = getattr(stmt, attr, None) - if isinstance(value, list): - blocks.append(value) - handlers = getattr(stmt, "handlers", None) - if handlers: - for handler in handlers: - blocks.append(handler.body) - return blocks - - def _has_non_block_wrapper_text(response_text: str, blocks: list[re.Match[str]]) -> bool: pieces: list[str] = [] cursor = 0 From 7926ffaf4e5c736d49c748ba0348f371e8a6fc79 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Thu, 12 Mar 2026 07:18:23 +1100 Subject: [PATCH 11/17] Remove patch size limits from rollout edits --- tests/test_prompt_builder.py | 2 +- 
tests/test_runner.py | 30 ------------------------------ ttt_autoresearch/prompt_builder.py | 2 +- ttt_autoresearch/runner.py | 18 ------------------ 4 files changed, 2 insertions(+), 50 deletions(-) diff --git a/tests/test_prompt_builder.py b/tests/test_prompt_builder.py index 13fb2381..4e09f98c 100644 --- a/tests/test_prompt_builder.py +++ b/tests/test_prompt_builder.py @@ -44,7 +44,7 @@ def test_prompt_is_single_rollout_specific(self) -> None: self.assertIn("TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0", prompt) self.assertIn("Preserve the final summary prints", prompt) self.assertIn("Return only one or more exact SEARCH/REPLACE edit blocks for `train.py`", prompt) - self.assertIn("Prefer 1-3 small patch blocks", prompt) + self.assertIn("Prefer as few patch blocks as needed", prompt) self.assertIn("Treat each SEARCH block like an exact `old_string` tool argument", prompt) self.assertIn("Do not return standalone code fragments", prompt) self.assertIn("Do not wrap the answer in JSON", prompt) diff --git a/tests/test_runner.py b/tests/test_runner.py index da8fbd91..df70841b 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -162,36 +162,6 @@ def test_preflight_rejects_missing_val_bpb_summary(self) -> None: self.assertFalse(preflight.ok) self.assertEqual(preflight.stage, "summary_output") - def test_preflight_rejects_overly_large_patch_scope(self) -> None: - with tempfile.TemporaryDirectory() as tmpdir: - root = Path(tmpdir) - (root / "program.md").write_text("program", encoding="utf-8") - (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") - original = ( - "from prepare import MAX_SEQ_LEN\n" - "TOTAL_BATCH_SIZE = 2048\n" - "DEVICE_BATCH_SIZE = 1\n" - "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" - "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" - "class GPT:\n" - " def forward(self, idx, targets=None, reduction='mean'):\n" - " return 0\n" - "print(f\"val_bpb: {1.0:.6f}\")\n" - ) - (root / 
"train.py").write_text(original, encoding="utf-8") - config = TTTAutoResearchConfig(execution_backend="local").normalized(root) - runner = AutoResearchRunner(root, config, Path(config.run_dir)) - search_block = "print(f\"val_bpb: {1.0:.6f}\")\n" - replacement = "".join(f"print({idx})\n" for idx in range(250)) - candidate = parse_patch_candidate_for_state( - f"<<<<<<< SEARCH\n{search_block}=======\n{replacement}>>>>>>> REPLACE", - original, - ) - workspace = runner.prepare_candidate_workspace(candidate, step=0) - preflight = runner.preflight_candidate(workspace, candidate) - self.assertFalse(preflight.ok) - self.assertEqual(preflight.stage, "edit_scope") - def test_build_bootstrap_prefers_stored_baseline_snapshot(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) diff --git a/ttt_autoresearch/prompt_builder.py b/ttt_autoresearch/prompt_builder.py index f871a14e..81374515 100644 --- a/ttt_autoresearch/prompt_builder.py +++ b/ttt_autoresearch/prompt_builder.py @@ -63,7 +63,7 @@ def build_rollout_prompt( - You may only edit `train.py` - Do not modify `prepare.py`, dependencies, or the evaluation harness - Return only one or more exact SEARCH/REPLACE edit blocks for `train.py` -- Prefer 1-3 small patch blocks +- Prefer as few patch blocks as needed, but use as many exact patch blocks as necessary to make the edit apply cleanly - Each SEARCH block must copy exact contiguous text from the current `train.py` - If you change constants or a small code region, include enough surrounding context in SEARCH to make the patch unique - Treat each SEARCH block like an exact `old_string` tool argument: it must match exactly once diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index 98df01dc..60ba551f 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -28,8 +28,6 @@ VAL_BPB_PRINT_RE = re.compile(r"print\(\s*f?[\"']val_bpb:\s*", re.MULTILINE) FORWARD_WITH_REDUCTION_RE = 
re.compile(r"def\s+forward\s*\([^)]*\breduction\s*=", re.MULTILINE) _KNOWN_PREPARE_CONSTANTS = {"MAX_SEQ_LEN": 2048} -MAX_PATCH_BLOCKS = 3 -MAX_LINES_CHANGED = 160 @dataclass(slots=True) @@ -106,8 +104,6 @@ def apply_search_replace_patch(patch_text: str, current_train_py: str) -> tuple[ blocks = list(SEARCH_REPLACE_BLOCK_RE.finditer(patch_text)) if not blocks: raise ValueError("Candidate must contain one or more SEARCH/REPLACE patch blocks.") - if len(blocks) > MAX_PATCH_BLOCKS: - raise ValueError(f"Candidate must contain at most {MAX_PATCH_BLOCKS} SEARCH/REPLACE blocks.") updated = current_train_py for match in blocks: @@ -223,20 +219,6 @@ def prepare_candidate_workspace( def preflight_candidate(self, workspace: Path, candidate: PatchCandidate) -> PreflightResult: train_path = workspace / "train.py" source = train_path.read_text(encoding="utf-8") - if candidate.patch_block_count > MAX_PATCH_BLOCKS: - return PreflightResult( - ok=False, - stage="edit_scope", - reason=f"Candidate used {candidate.patch_block_count} patch blocks; limit is {MAX_PATCH_BLOCKS}.", - details={"patch_block_count": candidate.patch_block_count, "max_patch_blocks": MAX_PATCH_BLOCKS}, - ) - if candidate.lines_changed > MAX_LINES_CHANGED: - return PreflightResult( - ok=False, - stage="edit_scope", - reason=f"Candidate changed {candidate.lines_changed} lines; limit is {MAX_LINES_CHANGED}.", - details={"lines_changed": candidate.lines_changed, "max_lines_changed": MAX_LINES_CHANGED}, - ) try: module = ast.parse(source, filename=str(train_path)) except SyntaxError as exc: From 51069a357db263d3d5c2e4598caab78267996927 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Thu, 12 Mar 2026 09:00:41 +1100 Subject: [PATCH 12/17] Keep full 16-rollout batches in discover training --- tests/test_cli_integration.py | 2 +- tests/test_env_smoke.py | 5 ++++- ttt_autoresearch/cli.py | 7 ++++++- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/test_cli_integration.py b/tests/test_cli_integration.py 
index b8c6e049..96658fcc 100644 --- a/tests/test_cli_integration.py +++ b/tests/test_cli_integration.py @@ -173,7 +173,7 @@ async def fake_discover_main(cfg): self.assertEqual(captured["rl_config"]["adv_estimator"], "entropic_adaptive_beta") self.assertEqual(captured["rl_config"]["loss_fn"], "importance_sampling") self.assertEqual(captured["rl_config"]["num_substeps"], 1) - self.assertTrue(captured["rl_config"]["remove_constant_reward_groups"]) + self.assertFalse(captured["rl_config"]["remove_constant_reward_groups"]) self.assertEqual(captured["dataset_config"]["batch_size"], 3) self.assertEqual(captured["dataset_config"]["group_size"], 2) self.assertEqual(captured["dataset_config"]["problem_type"], "autoresearch") diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 7aed1a6e..34e3e68a 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -67,11 +67,14 @@ def test_env_prompt_and_reward_flow(self) -> None: AutoResearchRewardEvaluator.configure(bootstrap, runner) state = AutoResearchDiscoverEnv.create_initial_state("autoresearch") - env = AutoResearchDiscoverEnv(renderer=None, initial_state=state, sampler=None, config=type("Cfg", (), { + sampler = type("Sampler", (), {"step": 0})() + env = AutoResearchDiscoverEnv(renderer=None, initial_state=state, sampler=sampler, config=type("Cfg", (), { "problem_type": "autoresearch", "log_path": str(bootstrap.discover_log_dir), "eval_timeout": config.eval_timeout, + "timeout": config.eval_timeout, "num_cpus_per_task": 0, + "convo_prefix": [], })()) prompt = env.get_question() diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py index 8ae5b492..9e5f2656 100644 --- a/ttt_autoresearch/cli.py +++ b/ttt_autoresearch/cli.py @@ -119,7 +119,12 @@ def main(argv: list[str] | None = None) -> int: num_substeps=1, save_every=config.save_every, load_checkpoint_path=None, - remove_constant_reward_groups=True, + # Keep all rollout groups so each outer step always trains on the full + # configured batch 
(e.g. 2 groups x 8 rollouts = 16 total rollouts). + # Upstream discover's constant-reward-group filtering can collapse a + # step to a single 8-rollout group when rewards are uniform, which is + # not what we want for this environment. + remove_constant_reward_groups=False, phase1_max_tokens=config.phase1_max_tokens, local_model_path=config.local_model_path, ) From da7dcea83b615952c9738ad6eb1a771046ff8c36 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Thu, 12 Mar 2026 10:24:29 +1100 Subject: [PATCH 13/17] Improve rollout prompting and renderer defaults --- .gitignore | 1 + configs/ttt_discover_autoresearch.yaml | 4 ++-- tests/test_env_smoke.py | 2 ++ tests/test_prompt_builder.py | 16 +++++++++------ tests/test_runner.py | 2 +- ttt_autoresearch/cli.py | 2 ++ ttt_autoresearch/config.py | 3 ++- ttt_autoresearch/discover_compat.py | 28 ++++++++++++++++++++++++++ ttt_autoresearch/prompt_builder.py | 12 ++++++----- 9 files changed, 55 insertions(+), 15 deletions(-) diff --git a/.gitignore b/.gitignore index e7838ca4..163ba653 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ AGENTS.md # Experimental code/artifacts dev/ +testing/output/ # Results file results.tsv runs/ diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index fd28d46b..823c6977 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,4 +1,4 @@ -model_name: moonshotai/Kimi-K2.5 +model_name: openai/gpt-oss-120b provider: null api_base: null target_val_bpb: 0.85 @@ -12,7 +12,7 @@ data_path: null baseline_command_override: null candidate_command_override: null experiment_name: autoresearch-ttt-discover -renderer_name: qwen3 +renderer_name: gpt_oss_high_reasoning learning_rate: 0.00004 lora_rank: 32 kl_penalty_coef: 0.1 diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 34e3e68a..6ba9b064 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -87,7 +87,9 @@ def 
test_env_prompt_and_reward_flow(self) -> None: self.assertIn("## Rules", prompt) self.assertIn("You may want to start your search from the current training script shown above.", prompt) self.assertIn("This is the current starting point selected by the search procedure.", prompt) + self.assertIn("Pursue bold, high-upside changes", prompt) self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) + self.assertIn("Hyperparameter tuning is allowed, but do not stop there", prompt) self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) self.assertNotIn("Baseline val_bpb from the original script", prompt) self.assertNotIn("LOOP FOREVER", prompt) diff --git a/tests/test_prompt_builder.py b/tests/test_prompt_builder.py index 4e09f98c..9c20751a 100644 --- a/tests/test_prompt_builder.py +++ b/tests/test_prompt_builder.py @@ -12,15 +12,17 @@ def test_prompt_is_single_rollout_specific(self) -> None: construction_section=( "You may want to start your search from the current training script shown above.\n" "This is the current starting point selected by the search procedure.\n" - "Make one focused experimental change at a time and preserve a working script.\n" + "Preserve a working script, but do not limit yourself to tiny hyperparameter tweaks.\n" + "Pursue bold, high-upside changes when they are technically coherent and likely to materially improve val_bpb.\n" "You are encouraged to explore meaningfully different directions if the current approach appears saturated." 
), code_section=( "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" - "Prefer small, local hill-climbing edits over broad rewrites.\n" + "Hyperparameter tuning is allowed, but do not stop there: pursue stronger algorithmic, architectural, data-flow, attention, optimization, or systems ideas when they could deliver a step-change improvement.\n" + "Prefer edits that are technically coherent and high-upside, even if they are more ambitious than simple hill-climbing.\n" "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" - "Do not refactor unrelated code.\n" + "Do not refactor unrelated code, but do make all integration edits required for the new idea to work cleanly.\n" "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." ), ) @@ -32,11 +34,13 @@ def test_prompt_is_single_rollout_specific(self) -> None: self.assertIn("You are iteratively optimizing val_bpb.", prompt) self.assertIn("You may want to start your search from the current training script shown above.", prompt) self.assertIn("This is the current starting point selected by the search procedure.", prompt) - self.assertIn("Make one focused experimental change at a time and preserve a working script.", prompt) + self.assertIn("do not limit yourself to tiny hyperparameter tweaks", prompt) + self.assertIn("Pursue bold, high-upside changes", prompt) self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) - self.assertIn("Prefer small, local hill-climbing edits over broad rewrites.", prompt) + self.assertIn("Hyperparameter tuning is allowed, but do not stop there", prompt) + self.assertIn("technically coherent and high-upside", prompt) self.assertIn("Moderate increases in VRAM are acceptable 
if they lead to meaningful gains.", prompt) - self.assertIn("Do not refactor unrelated code.", prompt) + self.assertIn("do make all integration edits required", prompt) self.assertIn("Maximum sequence length is `2048`", prompt) self.assertIn("Validation uses the pinned shard `06542`", prompt) self.assertIn("vocab size `8192`", prompt) diff --git a/tests/test_runner.py b/tests/test_runner.py index df70841b..393bbb29 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -233,7 +233,7 @@ def test_kimi_model_is_primary_supported_renderer(self) -> None: model_name="moonshotai/Kimi-K2.5", execution_backend="local", ).normalized(Path(".")) - self.assertEqual(config.renderer_name, "qwen3") + self.assertEqual(config.renderer_name, "kimi_k25") def test_group_defaults_reflect_medium_preset(self) -> None: config = TTTAutoResearchConfig().normalized(Path(".")) diff --git a/ttt_autoresearch/cli.py b/ttt_autoresearch/cli.py index 9e5f2656..ba23deb5 100644 --- a/ttt_autoresearch/cli.py +++ b/ttt_autoresearch/cli.py @@ -9,6 +9,7 @@ from ttt_autoresearch.config import TTTAutoResearchConfig, load_config, write_resolved_config from ttt_autoresearch.discover_compat import ( + patch_ttt_discover_kimi_renderer, patch_transformers_kimi_trust_remote_code, patch_ttt_discover_kimi_tokenizer, patch_ttt_discover_no_wandb_bug, @@ -71,6 +72,7 @@ def main(argv: list[str] | None = None) -> int: patch_ttt_discover_no_wandb_bug() patch_ttt_discover_kimi_tokenizer() + patch_ttt_discover_kimi_renderer() runner = AutoResearchRunner(repo_root=repo_root, config=config, run_dir=run_dir) try: diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 33890f36..789e431e 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -11,6 +11,7 @@ DISCOVER_GIT_REV = "5df1a0ee9b04272ca33de0101ae64dd499e63f29" SUPPORTED_RENDERERS = ( + "kimi_k25", "qwen3", "qwen3_instruct", "gpt_oss_no_sysprompt", @@ -218,7 +219,7 @@ def subprocess_env(self) -> dict[str, str]: def 
infer_renderer_name(model_name: str) -> str | None: lowered = model_name.lower() if "kimi-k2" in lowered or "moonshotai/kimi" in lowered: - return "qwen3" + return "kimi_k25" if "qwen" in lowered: if "instruct" in lowered: return "qwen3_instruct" diff --git a/ttt_autoresearch/discover_compat.py b/ttt_autoresearch/discover_compat.py index 4ca9667a..2764ebf4 100644 --- a/ttt_autoresearch/discover_compat.py +++ b/ttt_autoresearch/discover_compat.py @@ -182,6 +182,34 @@ def patched_get_tokenizer(model_name: str): dataset_builder.get_tokenizer = misc_utils.get_tokenizer +def patch_ttt_discover_kimi_renderer() -> None: + """Register a Kimi-specific renderer alias that maps to the Qwen3 format. + + Tinker exposes Kimi K2.5 as a reasoning model, but upstream discover only + knows about ``qwen3`` renderer names for this token format. Expose an + explicit ``kimi_k25`` renderer so the local config matches the model family + while preserving the same underlying rendering behavior. + """ + + try: + from ttt_discover.tinker_utils import renderers + except ImportError: + return + + if getattr(renderers, "_autoresearch_kimi_renderer_patch", False): + return + + original_get_renderer = renderers.get_renderer + + def patched_get_renderer(name: str, tokenizer: Any): + if name == "kimi_k25": + return renderers.Qwen3Renderer(tokenizer) + return original_get_renderer(name, tokenizer) + + renderers.get_renderer = patched_get_renderer + renderers._autoresearch_kimi_renderer_patch = True + + def patch_transformers_kimi_trust_remote_code() -> None: """Force trust_remote_code=True for Kimi K2.5 tokenizer loads. 
diff --git a/ttt_autoresearch/prompt_builder.py b/ttt_autoresearch/prompt_builder.py index 81374515..3ce21772 100644 --- a/ttt_autoresearch/prompt_builder.py +++ b/ttt_autoresearch/prompt_builder.py @@ -6,16 +6,18 @@ CONSTRUCTION_SECTION = ( "You may want to start your search from the current training script shown above.\n" "This is the current starting point selected by the search procedure.\n" - "Make one focused experimental change at a time and preserve a working script.\n" + "Preserve a working script, but do not limit yourself to tiny hyperparameter tweaks.\n" + "Pursue bold, high-upside changes when they are technically coherent and likely to materially improve val_bpb.\n" "You are encouraged to explore meaningfully different directions if the current approach appears saturated." ) CODE_SECTION = ( "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" - "Prefer small, local hill-climbing edits over broad rewrites.\n" + "Hyperparameter tuning is allowed, but do not stop there: pursue stronger algorithmic, architectural, data-flow, attention, optimization, or systems ideas when they could deliver a step-change improvement.\n" + "Prefer edits that are technically coherent and high-upside, even if they are more ambitious than simple hill-climbing.\n" "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" - "Do not refactor unrelated code.\n" + "Do not refactor unrelated code, but do make all integration edits required for the new idea to work cleanly.\n" "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." ) @@ -45,7 +47,7 @@ def build_rollout_prompt( **Lower `val_bpb` values are better** - they indicate a stronger model under the fixed evaluation budget. 
## Budget & Resources -- **Time budget**: 5 minutes of wall-clock training time +- **Time budget**: 5 minutes of wall-clock training time on a single NVIDIA H100 GPU - **Evaluation harness**: fixed AutoResearch runner - **VRAM**: moderate increases are acceptable for meaningful gains, but avoid wasteful blowups @@ -82,7 +84,7 @@ def build_rollout_prompt( - The SEARCH text must match the current starting `train.py` exactly - Propose exactly one candidate for this rollout - Optimize for the lowest `val_bpb` under the fixed time budget -- Prefer simpler changes when improvement is similar +- Prefer simpler changes when improvement is similar (large changes such as architectural or similar changes are preferred when improvement is large) ## Example Response <<<<<<< SEARCH From 4b4573ac70b76da81c8ec7f2a1cb9f894e3d4a7c Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Thu, 12 Mar 2026 10:49:18 +1100 Subject: [PATCH 14/17] Relax rollout preflight and add prompt validators --- testing/run_tinker_reasoning_smoke.py | 318 ++++++++++++++++++ testing/validate_rollout_prompt.py | 449 ++++++++++++++++++++++++++ tests/test_runner.py | 33 ++ ttt_autoresearch/runner.py | 9 - 4 files changed, 800 insertions(+), 9 deletions(-) create mode 100644 testing/run_tinker_reasoning_smoke.py create mode 100644 testing/validate_rollout_prompt.py diff --git a/testing/run_tinker_reasoning_smoke.py b/testing/run_tinker_reasoning_smoke.py new file mode 100644 index 00000000..eb0e8dd8 --- /dev/null +++ b/testing/run_tinker_reasoning_smoke.py @@ -0,0 +1,318 @@ +from __future__ import annotations + +import argparse +import json +import os +import sys +from dataclasses import asdict, dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +import tinker + +REPO_ROOT = Path(__file__).resolve().parents[1] +if str(REPO_ROOT) not in sys.path: + sys.path.insert(0, str(REPO_ROOT)) + +from ttt_autoresearch.discover_compat import ( + 
patch_transformers_kimi_trust_remote_code, + patch_ttt_discover_kimi_renderer, + patch_ttt_discover_kimi_tokenizer, +) +from ttt_autoresearch.env import AutoResearchState +from ttt_autoresearch.prompt_builder import build_prompt_for_state + + +patch_ttt_discover_kimi_tokenizer() +patch_ttt_discover_kimi_renderer() +patch_transformers_kimi_trust_remote_code() + +from ttt_discover.tinker_utils import renderers + + +DEFAULT_PROMPT_FILE = REPO_ROOT / "prompt.txt" +DEFAULT_OUTPUT_DIR = REPO_ROOT / "testing" / "output" + + +@dataclass(frozen=True) +class ModelSpec: + key: str + model_name: str + renderer_name: str + label: str + + +MODEL_SPECS = ( + ModelSpec( + key="kimi", + model_name="moonshotai/Kimi-K2.5", + renderer_name="kimi_k25", + label="Kimi K2.5 via Tinker", + ), + ModelSpec( + key="gpt_oss", + model_name="openai/gpt-oss-120b", + renderer_name="gpt_oss_high_reasoning", + label="GPT-OSS-120B high reasoning via Tinker", + ), +) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Temporary smoke test for Tinker reasoning models." + ) + parser.add_argument( + "--prompt", + help="Prompt text to send. If omitted, --prompt-file is used, or prompt.txt if present.", + ) + parser.add_argument( + "--prompt-file", + type=Path, + help="File containing the prompt text.", + ) + parser.add_argument( + "--api-key", + help="Tinker API key. 
Defaults to TINKER_API_KEY from the environment.", + ) + parser.add_argument( + "--prompt-mode", + choices=["raw", "normal_full"], + default="raw", + help="Use a raw prompt or build the normal full AutoResearch prompt from train.py.", + ) + parser.add_argument( + "--train-file", + type=Path, + default=REPO_ROOT / "train.py", + help="train.py to embed when --prompt-mode normal_full is used.", + ) + parser.add_argument( + "--current-val-bpb", + type=float, + default=1.0, + help="Current val_bpb used when building the normal full prompt.", + ) + parser.add_argument( + "--target-val-bpb", + type=float, + default=0.85, + help="Target val_bpb used when building the normal full prompt.", + ) + parser.add_argument( + "--base-url", + help="Override Tinker base URL. Defaults to TINKER_BASE_URL or SDK default.", + ) + parser.add_argument( + "--max-tokens", + type=int, + default=None, + help="Optional maximum tokens to sample per model. Omit for no generation cap.", + ) + parser.add_argument( + "--temperature", + type=float, + default=1.0, + help="Sampling temperature.", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=DEFAULT_OUTPUT_DIR, + help="Directory where run artifacts are saved.", + ) + parser.add_argument( + "--models", + nargs="+", + choices=[spec.key for spec in MODEL_SPECS], + default=[spec.key for spec in MODEL_SPECS], + help="Subset of models to run.", + ) + return parser.parse_args() + + +def resolve_prompt(args: argparse.Namespace) -> tuple[str, str]: + if args.prompt_mode == "normal_full": + if args.prompt or args.prompt_file: + raise SystemExit("--prompt/--prompt-file cannot be combined with --prompt-mode normal_full.") + train_path = args.train_file.resolve() + train_text = train_path.read_text(encoding="utf-8") + state = AutoResearchState( + timestep=-1, + construction=[], + code=train_text, + value=-args.current_val_bpb, + ) + prompt = build_prompt_for_state(state, args.target_val_bpb) + return prompt, f"normal_full:{train_path}" + if 
args.prompt and args.prompt_file: + raise SystemExit("Pass either --prompt or --prompt-file, not both.") + if args.prompt: + return args.prompt, "inline" + if args.prompt_file: + return args.prompt_file.read_text(encoding="utf-8"), str(args.prompt_file.resolve()) + if DEFAULT_PROMPT_FILE.exists(): + return DEFAULT_PROMPT_FILE.read_text(encoding="utf-8"), str(DEFAULT_PROMPT_FILE) + raise SystemExit("No prompt provided. Pass --prompt or --prompt-file.") + + +def extract_kimi_sections(text: str) -> dict[str, str | None]: + think_open = "<think>" + think_close = "</think>" + if think_open in text and think_close in text: + thinking_start = text.index(think_open) + len(think_open) + thinking_end = text.index(think_close, thinking_start) + thinking = text[thinking_start:thinking_end].strip() + final = text[thinking_end + len(think_close) :].strip() + return {"thinking": thinking or None, "final": final or None} + return {"thinking": None, "final": text.strip() or None} + + +def extract_gpt_oss_sections(text: str) -> dict[str, str | None]: + analysis_marker = "<|channel|>analysis<|message|>" + final_marker = "<|channel|>final<|message|>" + handoff_marker = "<|end|><|start|>assistant" + thinking: str | None = None + final: str | None = None + + final_index = text.find(final_marker) + if final_index != -1: + final = text[final_index + len(final_marker) :].strip() or None + + analysis_index = text.find(analysis_marker) + if analysis_index != -1: + analysis_start = analysis_index + len(analysis_marker) + analysis_end = text.find(handoff_marker, analysis_start) + if analysis_end == -1: + analysis_end = final_index if final_index != -1 else len(text) + thinking = text[analysis_start:analysis_end].strip() or None + + if final is None and final_index == -1: + final = text.strip() or None + + return {"thinking": thinking, "final": final} + + +def extract_sections(spec: ModelSpec, text: str) -> dict[str, str | None]: + if spec.key == "kimi": + return extract_kimi_sections(text) + if spec.key == 
"gpt_oss": + return extract_gpt_oss_sections(text) + return {"thinking": None, "final": text.strip() or None} + + +def run_single_model( + service_client: tinker.ServiceClient, + spec: ModelSpec, + prompt: str, + max_tokens: int | None, + temperature: float, +) -> dict[str, Any]: + print(f"Starting model: {spec.label}", flush=True) + tokenizer = renderers.get_tokenizer(spec.model_name) + renderer = renderers.get_renderer(spec.renderer_name, tokenizer) + prompt_input = renderer.build_generation_prompt( + [{"role": "user", "content": prompt}], + ) + sampling_client = service_client.create_sampling_client(base_model=spec.model_name) + sample_response = sampling_client.sample( + prompt=prompt_input, + num_samples=1, + sampling_params=tinker.SamplingParams( + stop=renderer.get_stop_sequences(), + max_tokens=max_tokens, + temperature=temperature, + ), + ).result() + sequence = sample_response.sequences[0] + parsed_message, parsed_ok = renderer.parse_response(sequence.tokens) + content = parsed_message.get("content", "") + if not isinstance(content, str): + raise ValueError(f"Expected string content from renderer, got: {type(content)!r}") + sections = extract_sections(spec, content) + return { + "spec": asdict(spec), + "parsed_ok": parsed_ok, + "token_count": len(sequence.tokens), + "raw_content": content, + "thinking": sections["thinking"], + "final": sections["final"], + } + + +def format_console_output(result: dict[str, Any]) -> str: + title = result["spec"]["label"] + parts = [f"===== {title} ====="] + thinking = result.get("thinking") + final = result.get("final") + if thinking: + parts.append("THINKING:") + parts.append(thinking) + if final: + parts.append("FINAL:") + parts.append(final) + if not thinking and not final: + parts.append("RAW:") + parts.append(result.get("raw_content", "")) + return "\n".join(parts) + + +def save_artifact( + output_dir: Path, + prompt: str, + prompt_source: str, + results: list[dict[str, Any]], +) -> Path: + 
output_dir.mkdir(parents=True, exist_ok=True) + timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ") + artifact_path = output_dir / f"tinker_reasoning_smoke_{timestamp}.json" + payload = { + "created_at_utc": timestamp, + "prompt_source": prompt_source, + "prompt": prompt, + "results": results, + } + artifact_path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8") + return artifact_path + + +def main() -> int: + args = parse_args() + prompt, prompt_source = resolve_prompt(args) + print(f"Prompt source: {prompt_source}", flush=True) + print(f"Selected models: {', '.join(args.models)}", flush=True) + api_key = args.api_key or os.environ.get("TINKER_API_KEY") + if not api_key: + raise SystemExit("Missing TINKER_API_KEY. Pass --api-key or set the environment variable.") + + service_kwargs: dict[str, Any] = {"api_key": api_key} + if args.base_url: + service_kwargs["base_url"] = args.base_url + + requested = set(args.models) + selected_specs = [spec for spec in MODEL_SPECS if spec.key in requested] + if not selected_specs: + raise SystemExit("No models selected.") + + service_client = tinker.ServiceClient(**service_kwargs) + results: list[dict[str, Any]] = [] + for spec in selected_specs: + result = run_single_model( + service_client=service_client, + spec=spec, + prompt=prompt, + max_tokens=args.max_tokens, + temperature=args.temperature, + ) + results.append(result) + print(format_console_output(result), flush=True) + print(flush=True) + + artifact_path = save_artifact(args.output_dir, prompt, prompt_source, results) + print(f"Saved artifact: {artifact_path}", flush=True) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/testing/validate_rollout_prompt.py b/testing/validate_rollout_prompt.py new file mode 100644 index 00000000..5e7f0c41 --- /dev/null +++ b/testing/validate_rollout_prompt.py @@ -0,0 +1,449 @@ +from __future__ import annotations + +import argparse +import json +import os +import sys +import tempfile 
+from dataclasses import asdict, dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +import tinker + +REPO_ROOT = Path(__file__).resolve().parents[1] +if str(REPO_ROOT) not in sys.path: + sys.path.insert(0, str(REPO_ROOT)) + +from ttt_autoresearch.config import TTTAutoResearchConfig +from ttt_autoresearch.discover_compat import ( + patch_transformers_kimi_trust_remote_code, + patch_ttt_discover_kimi_renderer, + patch_ttt_discover_kimi_tokenizer, +) +from ttt_autoresearch.env import AutoResearchState +from ttt_autoresearch.prompt_builder import build_prompt_for_state +from ttt_autoresearch.runner import AutoResearchRunner, parse_patch_candidate_for_state + + +patch_ttt_discover_kimi_tokenizer() +patch_ttt_discover_kimi_renderer() +patch_transformers_kimi_trust_remote_code() + +from ttt_discover.tinker_utils import renderers + + +DEFAULT_OUTPUT_DIR = REPO_ROOT / "testing" / "output" + + +@dataclass(frozen=True) +class ModelSpec: + key: str + model_name: str + renderer_name: str + label: str + use_gpt_oss_sections: bool = False + + +MODEL_SPECS = { + "gpt_oss": ModelSpec( + key="gpt_oss", + model_name="openai/gpt-oss-120b", + renderer_name="gpt_oss_high_reasoning", + label="GPT-OSS-120B high reasoning via Tinker", + use_gpt_oss_sections=True, + ), + "kimi": ModelSpec( + key="kimi", + model_name="moonshotai/Kimi-K2.5", + renderer_name="kimi_k25", + label="Kimi K2.5 via Tinker", + use_gpt_oss_sections=False, + ), +} + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Validate rollout-style prompt outputs against the current train.py." + ) + parser.add_argument( + "--api-key", + help="Tinker API key. Defaults to TINKER_API_KEY from the environment.", + ) + parser.add_argument( + "--model", + choices=sorted(MODEL_SPECS), + default="gpt_oss", + help="Model to test. 
Defaults to the repo's current rollout default.", ) + parser.add_argument( + "--num-samples", + type=int, + default=8, + help="Number of rollout-style prompt samples to generate.", + ) + parser.add_argument( + "--train-file", + type=Path, + default=REPO_ROOT / "train.py", + help="train.py to embed in the prompt and validate against.", + ) + parser.add_argument( + "--current-val-bpb", + type=float, + default=1.0, + help="Current val_bpb used to build the prompt.", + ) + parser.add_argument( + "--target-val-bpb", + type=float, + default=0.85, + help="Target val_bpb used to build the prompt.", + ) + parser.add_argument( + "--temperature", + type=float, + default=1.0, + help="Sampling temperature.", + ) + parser.add_argument( + "--max-tokens", + type=int, + default=26000, + help="Total prompt-plus-output token budget. Defaults to the repo's GPT-OSS rollout setting.", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=DEFAULT_OUTPUT_DIR, + help="Directory where run artifacts are saved.", + ) + return parser.parse_args() + + +def build_prompt(train_file: Path, current_val_bpb: float, target_val_bpb: float) -> tuple[str, str]: + train_path = train_file.resolve() + train_text = train_path.read_text(encoding="utf-8") + state = AutoResearchState( + timestep=-1, + construction=[], + code=train_text, + value=-current_val_bpb, + ) + prompt = build_prompt_for_state(state, target_val_bpb) + return prompt, train_text + + +def extract_kimi_sections(text: str) -> dict[str, str | None]: + think_open = "<think>" + think_close = "</think>" + if think_open in text and think_close in text: + thinking_start = text.index(think_open) + len(think_open) + thinking_end = text.index(think_close, thinking_start) + thinking = text[thinking_start:thinking_end].strip() + final = text[thinking_end + len(think_close) :].strip() + return {"thinking": thinking or None, "final": final or None} + return {"thinking": None, "final": text.strip() or None} + + +def extract_gpt_oss_sections(text: str) -> 
dict[str, str | None]: + analysis_marker = "<|channel|>analysis<|message|>" + final_marker = "<|channel|>final<|message|>" + handoff_marker = "<|end|><|start|>assistant" + thinking: str | None = None + final: str | None = None + + final_index = text.find(final_marker) + if final_index != -1: + final = text[final_index + len(final_marker) :].strip() or None + + analysis_index = text.find(analysis_marker) + if analysis_index != -1: + analysis_start = analysis_index + len(analysis_marker) + analysis_end = text.find(handoff_marker, analysis_start) + if analysis_end == -1: + analysis_end = final_index if final_index != -1 else len(text) + thinking = text[analysis_start:analysis_end].strip() or None + + if final is None and final_index == -1: + final = text.strip() or None + + return {"thinking": thinking, "final": final} + + +def extract_sections(spec: ModelSpec, text: str) -> dict[str, str | None]: + if spec.use_gpt_oss_sections: + return extract_gpt_oss_sections(text) + return extract_kimi_sections(text) + + +def classify_boldness(lines_changed: int, final_text: str, updated_train_py: str, current_train_py: str) -> dict[str, Any]: + categories: list[str] = [] + if "def " in final_text and ">>>>>>> REPLACE" in final_text: + categories.append("code_logic") + + architecture_markers = ( + "DEPTH", + "ASPECT_RATIO", + "HEAD_DIM", + "WINDOW_PATTERN", + "build_model_config", + "class GPT", + "class Block", + "class CausalSelfAttention", + ) + optimizer_markers = ( + "EMBEDDING_LR", + "UNEMBEDDING_LR", + "MATRIX_LR", + "SCALAR_LR", + "WEIGHT_DECAY", + "ADAM_BETAS", + "WARMUP_RATIO", + "WARMDOWN_RATIO", + "FINAL_LR_FRAC", + "setup_optimizer", + "get_lr_multiplier", + ) + throughput_markers = ( + "TOTAL_BATCH_SIZE", + "DEVICE_BATCH_SIZE", + "grad_accum_steps", + ) + + if any(marker in final_text for marker in architecture_markers): + categories.append("architecture") + if any(marker in final_text for marker in optimizer_markers): + categories.append("optimizer_or_schedule") + 
if any(marker in final_text for marker in throughput_markers): + categories.append("throughput") + + if not categories: + categories.append("unclear") + + if lines_changed >= 80 or "code_logic" in categories: + rating = "bold" + elif lines_changed >= 25 or "architecture" in categories: + rating = "moderate" + else: + rating = "conservative" + + return { + "rating": rating, + "categories": categories, + "lines_changed": lines_changed, + "net_char_delta": len(updated_train_py) - len(current_train_py), + } + + +def evaluate_sample( + sample_index: int, + spec: ModelSpec, + renderer: Any, + sequence: Any, + current_train_py: str, +) -> dict[str, Any]: + parsed_message, parsed_ok = renderer.parse_response(sequence.tokens) + content = parsed_message.get("content", "") + if not isinstance(content, str): + raise ValueError(f"Expected string content from renderer, got: {type(content)!r}") + + sections = extract_sections(spec, content) + final_text = sections["final"] or "" + + validation: dict[str, Any] = { + "sample_index": sample_index, + "parsed_ok": parsed_ok, + "token_count": len(sequence.tokens), + "thinking": sections["thinking"], + "final": sections["final"], + "raw_content": content, + } + + try: + candidate = parse_patch_candidate_for_state(final_text, current_train_py) + except ValueError as exc: + validation.update( + { + "format_pass": False, + "format_reason": str(exc), + "candidate_format": None, + "patch_block_count": 0, + "lines_changed": 0, + "preflight_ok": False, + "preflight_stage": None, + "preflight_reason": None, + "boldness": { + "rating": "unknown", + "categories": ["invalid_format"], + "lines_changed": 0, + "net_char_delta": 0, + }, + } + ) + return validation + + with tempfile.TemporaryDirectory(prefix="rollout-validate-") as tmpdir: + root = Path(tmpdir) + config = TTTAutoResearchConfig(execution_backend="local").normalized(REPO_ROOT) + runner = AutoResearchRunner(REPO_ROOT, config, root / "run") + workspace = 
runner.prepare_candidate_workspace(candidate, step=sample_index, prefix="validate") + preflight = runner.preflight_candidate(workspace, candidate) + + boldness = classify_boldness( + candidate.lines_changed, + final_text, + candidate.train_py, + current_train_py, + ) + validation.update( + { + "format_pass": True, + "format_reason": "Patch parsed successfully.", + "candidate_format": candidate.candidate_format, + "patch_block_count": candidate.patch_block_count, + "lines_changed": candidate.lines_changed, + "preflight_ok": preflight.ok, + "preflight_stage": preflight.stage, + "preflight_reason": preflight.reason, + "preflight_details": preflight.details, + "boldness": boldness, + } + ) + return validation + + +def summarize(results: list[dict[str, Any]]) -> dict[str, Any]: + format_passes = sum(1 for item in results if item["format_pass"]) + preflight_passes = sum(1 for item in results if item["format_pass"] and item["preflight_ok"]) + + bold_counts: dict[str, int] = {} + for item in results: + rating = item["boldness"]["rating"] + bold_counts[rating] = bold_counts.get(rating, 0) + 1 + + impressive = [ + { + "sample_index": item["sample_index"], + "lines_changed": item["lines_changed"], + "categories": item["boldness"]["categories"], + "preflight_ok": item["preflight_ok"], + "preview": (item["final"] or "")[:400], + } + for item in results + if item["format_pass"] and ( + item["boldness"]["rating"] == "bold" + or "architecture" in item["boldness"]["categories"] + or "code_logic" in item["boldness"]["categories"] + ) + ] + + return { + "num_samples": len(results), + "format_passes": format_passes, + "format_failures": len(results) - format_passes, + "preflight_passes": preflight_passes, + "preflight_failures": len(results) - preflight_passes, + "format_pass_rate": format_passes / len(results) if results else 0.0, + "preflight_pass_rate": preflight_passes / len(results) if results else 0.0, + "boldness_counts": bold_counts, + "impressive_candidates": impressive, + } 
+ + +def write_artifact( + output_dir: Path, + payload: dict[str, Any], +) -> Path: + output_dir.mkdir(parents=True, exist_ok=True) + timestamp = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ") + artifact_path = output_dir / f"rollout_prompt_validation_{timestamp}.json" + artifact_path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8") + return artifact_path + + +def main() -> int: + args = parse_args() + api_key = args.api_key or os.environ.get("TINKER_API_KEY") + if not api_key: + raise SystemExit("Missing TINKER_API_KEY. Pass --api-key or set the environment variable.") + + spec = MODEL_SPECS[args.model] + prompt, current_train_py = build_prompt( + args.train_file, + args.current_val_bpb, + args.target_val_bpb, + ) + + print(f"Model: {spec.label}", flush=True) + print(f"Samples: {args.num_samples}", flush=True) + print(f"Prompt source: {args.train_file.resolve()}", flush=True) + + tokenizer = renderers.get_tokenizer(spec.model_name) + renderer = renderers.get_renderer(spec.renderer_name, tokenizer) + prompt_input = renderer.build_generation_prompt([{"role": "user", "content": prompt}]) + available_output_tokens = args.max_tokens - prompt_input.length + if available_output_tokens <= 0: + raise SystemExit( + f"Prompt length {prompt_input.length} exceeds max token budget {args.max_tokens}." + ) + + service_client = tinker.ServiceClient(api_key=api_key) + sampling_client = service_client.create_sampling_client(base_model=spec.model_name) + + print("Submitting Tinker sample request...", flush=True) + sample_response = sampling_client.sample( + prompt=prompt_input, + num_samples=args.num_samples, + sampling_params=tinker.SamplingParams( + stop=renderer.get_stop_sequences(), + max_tokens=available_output_tokens, + temperature=args.temperature, + ), + ).result() + print("Tinker response received. 
Validating candidates...", flush=True) + + results: list[dict[str, Any]] = [] + for index, sequence in enumerate(sample_response.sequences, start=1): + result = evaluate_sample( + sample_index=index, + spec=spec, + renderer=renderer, + sequence=sequence, + current_train_py=current_train_py, + ) + results.append(result) + status = "PASS" if result["format_pass"] and result["preflight_ok"] else "FAIL" + print( + f"Sample {index}: {status} | format={result['format_pass']} | " + f"preflight={result['preflight_ok']} | boldness={result['boldness']['rating']}", + flush=True, + ) + + summary = summarize(results) + payload = { + "created_at_utc": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), + "model": asdict(spec), + "num_samples": args.num_samples, + "temperature": args.temperature, + "max_tokens_budget": args.max_tokens, + "available_output_tokens": available_output_tokens, + "train_file": str(args.train_file.resolve()), + "summary": summary, + "results": results, + } + artifact_path = write_artifact(args.output_dir, payload) + + print( + f"Summary: format {summary['format_passes']}/{summary['num_samples']}, " + f"preflight {summary['preflight_passes']}/{summary['num_samples']}", + flush=True, + ) + print(f"Saved artifact: {artifact_path}", flush=True) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/tests/test_runner.py b/tests/test_runner.py index 393bbb29..f47d7e08 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -134,6 +134,39 @@ def test_top_level_undefined_name_does_not_treat_nested_bindings_as_module_scope ) self.assertEqual(_find_top_level_undefined_name(module), "x") + def test_preflight_allows_runtime_names_defined_in_top_level_loop(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("program", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\nMAX_SEQ_LEN = 2048\n", encoding="utf-8") + source = ( + "from prepare 
import MAX_SEQ_LEN, TIME_BUDGET\n" + "TOTAL_BATCH_SIZE = 4096\n" + "DEVICE_BATCH_SIZE = 2\n" + "tokens_per_fwdbwd = DEVICE_BATCH_SIZE * MAX_SEQ_LEN\n" + "assert TOTAL_BATCH_SIZE % tokens_per_fwdbwd == 0\n" + "progress = 0.5\n" + "while True:\n" + " pct_done = 100 * progress\n" + " print(pct_done)\n" + " break\n" + "class GPT:\n" + " def forward(self, idx, targets=None, reduction='mean'):\n" + " return 0\n" + "print(f\"val_bpb: {1.0:.6f}\")\n" + ) + (root / "train.py").write_text(source, encoding="utf-8") + config = TTTAutoResearchConfig(execution_backend="local").normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + candidate = parse_patch_candidate_for_state( + "<<<<<<< SEARCH\nprint(pct_done)\n=======\nprint(f'progress={pct_done}')\n>>>>>>> REPLACE", + source, + ) + workspace = runner.prepare_candidate_workspace(candidate, step=0) + preflight = runner.preflight_candidate(workspace, candidate) + self.assertTrue(preflight.ok) + self.assertEqual(preflight.stage, "ok") + def test_preflight_rejects_missing_val_bpb_summary(self) -> None: with tempfile.TemporaryDirectory() as tmpdir: root = Path(tmpdir) diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index 60ba551f..0480c430 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -234,15 +234,6 @@ def preflight_candidate(self, workspace: Path, candidate: PatchCandidate) -> Pre }, ) - undefined_name = _find_top_level_undefined_name(module) - if undefined_name is not None: - return PreflightResult( - ok=False, - stage="top_level_names", - reason=f"Top-level code references undefined name {undefined_name!r}.", - details={"name": undefined_name}, - ) - if not VAL_BPB_PRINT_RE.search(source): return PreflightResult( ok=False, From 1f9bc7b6d647f40f21ba910f282f64ea4aa9c0ff Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Thu, 12 Mar 2026 15:02:20 +1100 Subject: [PATCH 15/17] Raise rollout target to 0.97 --- README.md | 8 ++++---- 
configs/ttt_discover_autoresearch.yaml | 2 +- configs/ttt_discover_autoresearch_large.yaml | 2 +- configs/ttt_discover_autoresearch_medium.yaml | 2 +- configs/ttt_discover_autoresearch_small.yaml | 2 +- testing/run_tinker_reasoning_smoke.py | 2 +- testing/validate_rollout_prompt.py | 2 +- ttt_autoresearch/config.py | 2 +- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 6d82015d..d02e717f 100644 --- a/README.md +++ b/README.md @@ -93,7 +93,7 @@ The shipped medium/large presets use: Both primary model modes use the same practical unattended search shape: -- `target_val_bpb: 0.85` +- `target_val_bpb: 0.97` - `execution_backend: hyperbolic` - `groups_per_step: 2` - `samples_per_step: 8` @@ -310,14 +310,14 @@ The main knobs for unattended Hyperbolic execution are: The checked-in presets use: ```yaml -target_val_bpb: 0.85 +target_val_bpb: 0.97 ``` This is a prompt-side benchmark target, not a reward cap. -- the model is shown the current starting state and the gap to `0.85` +- the model is shown the current starting state and the gap to `0.97` - the RL reward still comes from the actual achieved `val_bpb` -- if a rollout beats `0.85`, it is still rewarded more for going even lower +- if a rollout beats `0.97`, it is still rewarded more for going even lower This mirrors how upstream `discover` environments use fixed benchmark targets in the prompt while computing reward from the evaluated task score. 
diff --git a/configs/ttt_discover_autoresearch.yaml b/configs/ttt_discover_autoresearch.yaml index 823c6977..a58c715c 100644 --- a/configs/ttt_discover_autoresearch.yaml +++ b/configs/ttt_discover_autoresearch.yaml @@ -1,7 +1,7 @@ model_name: openai/gpt-oss-120b provider: null api_base: null -target_val_bpb: 0.85 +target_val_bpb: 0.97 max_steps: 12 groups_per_step: 2 samples_per_step: 8 diff --git a/configs/ttt_discover_autoresearch_large.yaml b/configs/ttt_discover_autoresearch_large.yaml index 793814a8..441fc84d 100644 --- a/configs/ttt_discover_autoresearch_large.yaml +++ b/configs/ttt_discover_autoresearch_large.yaml @@ -1,7 +1,7 @@ model_name: openai/gpt-oss-120b provider: null api_base: null -target_val_bpb: 0.85 +target_val_bpb: 0.97 max_steps: 20 groups_per_step: 2 samples_per_step: 8 diff --git a/configs/ttt_discover_autoresearch_medium.yaml b/configs/ttt_discover_autoresearch_medium.yaml index 05797b51..181faa1b 100644 --- a/configs/ttt_discover_autoresearch_medium.yaml +++ b/configs/ttt_discover_autoresearch_medium.yaml @@ -1,7 +1,7 @@ model_name: openai/gpt-oss-120b provider: null api_base: null -target_val_bpb: 0.85 +target_val_bpb: 0.97 max_steps: 12 groups_per_step: 2 samples_per_step: 8 diff --git a/configs/ttt_discover_autoresearch_small.yaml b/configs/ttt_discover_autoresearch_small.yaml index 00c75c01..48d0afa7 100644 --- a/configs/ttt_discover_autoresearch_small.yaml +++ b/configs/ttt_discover_autoresearch_small.yaml @@ -1,7 +1,7 @@ model_name: openai/gpt-oss-120b provider: null api_base: null -target_val_bpb: 0.85 +target_val_bpb: 0.97 max_steps: 12 groups_per_step: 2 samples_per_step: 4 diff --git a/testing/run_tinker_reasoning_smoke.py b/testing/run_tinker_reasoning_smoke.py index eb0e8dd8..b4b58b2b 100644 --- a/testing/run_tinker_reasoning_smoke.py +++ b/testing/run_tinker_reasoning_smoke.py @@ -97,7 +97,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--target-val-bpb", type=float, - default=0.85, + default=0.97, 
help="Target val_bpb used when building the normal full prompt.", ) parser.add_argument( diff --git a/testing/validate_rollout_prompt.py b/testing/validate_rollout_prompt.py index 5e7f0c41..f2208af7 100644 --- a/testing/validate_rollout_prompt.py +++ b/testing/validate_rollout_prompt.py @@ -99,7 +99,7 @@ def parse_args() -> argparse.Namespace: parser.add_argument( "--target-val-bpb", type=float, - default=0.85, + default=0.97, help="Target val_bpb used to build the prompt.", ) parser.add_argument( diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 789e431e..47ff0337 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -26,7 +26,7 @@ class TTTAutoResearchConfig: model_name: str = "openai/gpt-oss-120b" provider: str | None = None api_base: str | None = None - target_val_bpb: float | None = 0.85 + target_val_bpb: float | None = 0.97 max_steps: int = 12 groups_per_step: int = 2 samples_per_step: int = 8 From 0d0a236ae134ea29166fa21da7dca58b8193fd05 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Fri, 13 Mar 2026 11:37:39 +1100 Subject: [PATCH 16/17] Improve rollout prompt and raw patch handling --- tests/test_env_smoke.py | 122 +++++++++++++++ tests/test_prompt_builder.py | 89 +++++------ tests/test_runner.py | 36 ++++- ttt_autoresearch/env.py | 57 +++++-- ttt_autoresearch/prompt_builder.py | 231 ++++++++++++++++++++++------- ttt_autoresearch/runner.py | 46 ++++-- 6 files changed, 449 insertions(+), 132 deletions(-) diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 6ba9b064..8fc1359a 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -104,6 +104,128 @@ def test_env_prompt_and_reward_flow(self) -> None: next_state = env._create_next_state(0, payload, verify) self.assertAlmostEqual(next_state.current_best_val_bpb, 0.9) + def test_env_step_accepts_raw_search_replace_response(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / 
"program.md").write_text("Focus on val_bpb.", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text(MINIMAL_VALID_TRAIN_PY, encoding="utf-8") + fixtures = root / "tests" / "fixtures" + fixtures.mkdir(parents=True) + fixture_src = Path(__file__).parent / "fixtures" / "fake_train.py" + (fixtures / "fake_train.py").write_text(fixture_src.read_text(encoding="utf-8"), encoding="utf-8") + + config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, + timeout_sec=1, + target_val_bpb=0.95, + candidate_command_override=[sys.executable, "tests/fixtures/fake_train.py"], + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.1) + AutoResearchDiscoverEnv.configure(bootstrap) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + + state = AutoResearchDiscoverEnv.create_initial_state("autoresearch") + + class FakeRenderer: + def __init__(self, payload: str) -> None: + self.payload = payload + + def parse_response(self, action): + return {"role": "assistant", "content": self.payload}, True + + def get_stop_sequences(self): + return [] + + class FakeSampler: + def __init__(self) -> None: + self.updated = False + + def update_states(self, states, parent_states, save=False): + self.updated = True + + payload = "<<<<<<< SEARCH\n# val_bpb: 1.100000\n=======\n# val_bpb: 0.900000\n>>>>>>> REPLACE" + sampler = FakeSampler() + env = AutoResearchDiscoverEnv(renderer=FakeRenderer(payload), initial_state=state, sampler=sampler, config=type("Cfg", (), { + "problem_type": "autoresearch", + "log_path": str(bootstrap.discover_log_dir), + "eval_timeout": config.eval_timeout, + "timeout": config.eval_timeout, + "num_cpus_per_task": 0, + "convo_prefix": [], + })()) + + result = asyncio.run(env.step([], 0)) + self.assertGreater(result.reward, 0.0) + self.assertTrue(result.metrics["format"]) + 
self.assertEqual(result.metrics["parsed_code"], payload) + self.assertTrue(sampler.updated) + + def test_env_step_uses_final_channel_and_persists_invalid_candidate(self) -> None: + with tempfile.TemporaryDirectory() as tmpdir: + root = Path(tmpdir) + (root / "program.md").write_text("Focus on val_bpb.", encoding="utf-8") + (root / "prepare.py").write_text("TIME_BUDGET = 1\n", encoding="utf-8") + (root / "train.py").write_text(MINIMAL_VALID_TRAIN_PY, encoding="utf-8") + + config = TTTAutoResearchConfig( + execution_backend="local", + max_concurrent_evaluations=1, + timeout_sec=1, + target_val_bpb=0.95, + ).normalized(root) + runner = AutoResearchRunner(root, config, Path(config.run_dir)) + bootstrap = runner.build_bootstrap(1.1) + AutoResearchDiscoverEnv.configure(bootstrap) + AutoResearchRewardEvaluator.configure(bootstrap, runner) + state = AutoResearchDiscoverEnv.create_initial_state("autoresearch") + + class FakeRenderer: + def parse_response(self, action): + content = ( + "<|channel|>analysis<|message|>\n" + "<<<<<<< SEARCH\n# val_bpb: 1.100000\n=======\n# val_bpb: 0.800000\n>>>>>>> REPLACE\n" + "<|channel|>final<|message|>\n" + "not a valid patch" + ) + return {"role": "assistant", "content": content}, True + + def get_stop_sequences(self): + return [] + + class FakeSampler: + def __init__(self) -> None: + self.updated = False + self.failed = False + + def update_states(self, states, parent_states, save=False): + self.updated = True + + def record_failed_rollout(self, initial_state): + self.failed = True + + sampler = FakeSampler() + env = AutoResearchDiscoverEnv(renderer=FakeRenderer(), initial_state=state, sampler=sampler, config=type("Cfg", (), { + "problem_type": "autoresearch", + "log_path": str(bootstrap.discover_log_dir), + "eval_timeout": config.eval_timeout, + "timeout": config.eval_timeout, + "num_cpus_per_task": 0, + "convo_prefix": [], + })()) + + result = asyncio.run(env.step([], 0)) + self.assertEqual(result.reward, 0.0) + 
self.assertFalse(result.metrics["format"]) + self.assertEqual(result.metrics["parsed_code"], "not a valid patch") + self.assertEqual(result.metrics["candidate_status"], "invalid_candidate") + self.assertTrue(sampler.failed) + history_path = Path(config.run_dir) / "history.jsonl" + self.assertTrue(history_path.exists()) + self.assertEqual(len(history_path.read_text(encoding="utf-8").splitlines()), 1) + if __name__ == "__main__": unittest.main() diff --git a/tests/test_prompt_builder.py b/tests/test_prompt_builder.py index 9c20751a..64f05b1a 100644 --- a/tests/test_prompt_builder.py +++ b/tests/test_prompt_builder.py @@ -6,62 +6,47 @@ class PromptBuilderTests(unittest.TestCase): - def test_prompt_is_single_rollout_specific(self) -> None: + def test_prompt_matches_current_rollout_contract(self) -> None: prompt = build_rollout_prompt( - state_ctx="You are iteratively optimizing val_bpb.\nCurrent val_bpb (lower is better): 1.020000", - construction_section=( - "You may want to start your search from the current training script shown above.\n" - "This is the current starting point selected by the search procedure.\n" - "Preserve a working script, but do not limit yourself to tiny hyperparameter tweaks.\n" - "Pursue bold, high-upside changes when they are technically coherent and likely to materially improve val_bpb.\n" - "You are encouraged to explore meaningfully different directions if the current approach appears saturated." 
- ), - code_section=( - "Reason about how you could further improve this training script under the fixed 5-minute training budget.\n" - "Hyperparameter tuning is allowed, but do not stop there: pursue stronger algorithmic, architectural, data-flow, attention, optimization, or systems ideas when they could deliver a step-change improvement.\n" - "Prefer edits that are technically coherent and high-upside, even if they are more ambitious than simple hill-climbing.\n" - "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" - "Moderate increases in VRAM are acceptable if they lead to meaningful gains.\n" - "Do not refactor unrelated code, but do make all integration edits required for the new idea to work cleanly.\n" - "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." - ), + state_ctx="You are iteratively optimizing val_bpb.\nCurrent val_bpb (lower is better): 1.020000\nTarget: 0.970000", + construction_section="unused construction", + code_section="unused code", ) - self.assertIn("expert machine learning researcher", prompt) - self.assertIn("## Problem", prompt) - self.assertIn("## Budget & Resources", prompt) - self.assertIn("## AutoResearch Invariants", prompt) - self.assertIn("## Rules", prompt) - self.assertIn("You are iteratively optimizing val_bpb.", prompt) - self.assertIn("You may want to start your search from the current training script shown above.", prompt) - self.assertIn("This is the current starting point selected by the search procedure.", prompt) - self.assertIn("do not limit yourself to tiny hyperparameter tweaks", prompt) - self.assertIn("Pursue bold, high-upside changes", prompt) - self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) - self.assertIn("Hyperparameter tuning is allowed, but do not stop there", prompt) - self.assertIn("technically coherent and 
high-upside", prompt) - self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) - self.assertIn("do make all integration edits required", prompt) - self.assertIn("Maximum sequence length is `2048`", prompt) - self.assertIn("Validation uses the pinned shard `06542`", prompt) - self.assertIn("vocab size `8192`", prompt) + self.assertIn("single-file language-model training script", prompt) + self.assertIn("You are producing exactly one candidate patch", prompt) + self.assertIn("## Objective", prompt) + self.assertIn("## Fixed Project Facts", prompt) + self.assertIn("## Non-Negotiable Constraints", prompt) + self.assertIn("## Fixed Training / Evaluation Facts", prompt) + self.assertIn("## Fixed Data Pipeline Contract", prompt) + self.assertIn("## Fixed Model / Evaluation Contract", prompt) + self.assertIn("## What You May Change", prompt) + self.assertIn("## Technical Guidance", prompt) + self.assertIn("## Budget-Aware Reasoning Requirement", prompt) + self.assertIn("## What Good Edits Usually Look Like", prompt) + self.assertIn("## What To Avoid", prompt) + self.assertIn("## Output Requirements", prompt) + self.assertIn("## Current Starting Point", prompt) + self.assertIn("300 seconds of wall-clock training time", prompt) + self.assertIn("maximum sequence length is fixed at `2048`", prompt) + self.assertIn("evaluation token budget is fixed at `40 * 524288`", prompt) + self.assertIn("validation is pinned to shard `06542`", prompt) + self.assertIn("tokenizer vocabulary size is fixed at `8192`", prompt) self.assertIn("forward(x, y, reduction='none')", prompt) self.assertIn("TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0", prompt) - self.assertIn("Preserve the final summary prints", prompt) - self.assertIn("Return only one or more exact SEARCH/REPLACE edit blocks for `train.py`", prompt) - self.assertIn("Prefer as few patch blocks as needed", prompt) - self.assertIn("Treat each SEARCH block like an exact `old_string` 
tool argument", prompt) - self.assertIn("Do not return standalone code fragments", prompt) - self.assertIn("Do not wrap the answer in JSON", prompt) - self.assertIn("Do not wrap the answer in markdown code fences", prompt) - self.assertIn("Do not abbreviate with `...` or placeholders", prompt) - self.assertIn("## Example Response", prompt) - self.assertIn("<<<<<<< SEARCH", prompt) - self.assertIn(">>>>>>> REPLACE", prompt) - self.assertNotIn("Baseline val_bpb from the original script", prompt) - self.assertNotIn("LOOP FOREVER", prompt) - self.assertNotIn("results.tsv", prompt) - self.assertNotIn("git reset", prompt) - self.assertNotIn("NEVER STOP", prompt) + self.assertIn("`DEVICE_BATCH_SIZE * MAX_SEQ_LEN`", prompt) + self.assertIn('`WINDOW_PATTERN = "SSSL"` may be less efficient than `"L"`', prompt) + self.assertIn("Return only exact search-and-replace patch blocks", prompt) + self.assertIn("", prompt) + self.assertIn("", prompt) + self.assertIn("", prompt) + self.assertIn("", prompt) + self.assertIn("Current val_bpb (lower is better): 1.020000", prompt) + self.assertIn("Target: 0.970000", prompt) + self.assertNotIn("prepare.py", prompt) + self.assertNotIn("pyproject.toml", prompt) + self.assertNotIn("unused construction", prompt) + self.assertNotIn("unused code", prompt) if __name__ == "__main__": diff --git a/tests/test_runner.py b/tests/test_runner.py index f47d7e08..a661ce12 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -12,6 +12,7 @@ from ttt_autoresearch.runner import ( AutoResearchRunner, _find_top_level_undefined_name, + extract_patch_payload, parse_patch_candidate_for_state, parse_val_bpb, ) @@ -35,14 +36,45 @@ def test_parse_candidate_accepts_search_replace_patch(self) -> None: self.assertEqual(candidate.patch_block_count, 1) self.assertEqual(candidate.train_py, "print(1)\n") + def test_parse_candidate_accepts_tag_patch(self) -> None: + candidate = parse_patch_candidate_for_state( + "\nprint(0)\n\n\nprint(1)\n", + "print(0)\n", + ) + 
self.assertEqual(candidate.candidate_format, "tag_patch") + self.assertEqual(candidate.patch_block_count, 1) + self.assertEqual(candidate.train_py, "print(1)\n") + def test_parse_candidate_extracts_patch_from_wrapper_text(self) -> None: candidate = parse_patch_candidate_for_state( - "Here is the patch\n<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE\nDone.", + "Here is the patch\n\nprint(0)\n\n\nprint(1)\n\nDone.", "print(0)\n", ) - self.assertEqual(candidate.candidate_format, "search_replace_patch_extracted") + self.assertEqual(candidate.candidate_format, "tag_patch_extracted") self.assertEqual(candidate.train_py, "print(1)\n") + def test_extract_patch_payload_prefers_gpt_oss_final_channel(self) -> None: + payload = ( + "<|channel|>analysis<|message|>thinking\n" + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(2)\n>>>>>>> REPLACE\n" + "<|channel|>final<|message|>" + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE" + ) + self.assertEqual( + extract_patch_payload(payload), + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE", + ) + + def test_extract_patch_payload_prefers_text_after_kimi_think_block(self) -> None: + payload = ( + "\ninternal reasoning\n<<<<<<< SEARCH\nprint(0)\n=======\nprint(2)\n>>>>>>> REPLACE\n\n" + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE" + ) + self.assertEqual( + extract_patch_payload(payload), + "<<<<<<< SEARCH\nprint(0)\n=======\nprint(1)\n>>>>>>> REPLACE", + ) + def test_parse_val_bpb(self) -> None: stdout = "---\nval_bpb: 0.997900\n" self.assertEqual(parse_val_bpb(stdout), 0.9979) diff --git a/ttt_autoresearch/env.py b/ttt_autoresearch/env.py index c9eff46e..d6c2864b 100644 --- a/ttt_autoresearch/env.py +++ b/ttt_autoresearch/env.py @@ -5,11 +5,15 @@ import json from typing import Any, ClassVar +import tinker + from ttt_autoresearch.config import BootstrapContext from ttt_autoresearch.discover_compat import Environment, State, VerifyResult from ttt_autoresearch.prompt_builder import 
build_prompt_for_state from ttt_autoresearch.reward import AutoResearchRewardEvaluator -from ttt_autoresearch.runner import parse_patch_candidate_for_state +from ttt_autoresearch.runner import extract_patch_payload, parse_patch_candidate_for_state +from ttt_discover.rl.types import StepResult +from ttt_discover.tinker_utils import logtree class AutoResearchState(State): def __init__( @@ -169,6 +173,46 @@ def _get_code_languages(self) -> list[str]: def _should_keep_code_separators(self) -> bool: return False + async def step(self, action: list[int], step_idx: int) -> StepResult: + message, parse_success = self.renderer.parse_response(action) + response = message["content"] + if not isinstance(response, str): + raise ValueError(f"Expected string response content, got {type(response)!r}") + + parsed_code = extract_patch_payload(response) + correct_format = float(parse_success) and float(self.check_format(parsed_code)) + + outs = await self.check_answer(parsed_code, step_idx) + logtree.log_text(f"Problem: {self.get_question()[:200]}...") + logtree.log_text(f"Response: {message['content']}") + logtree.log_text( + f"Format Valid: {'✓' if correct_format else '✗'}, " + f"Reward: {outs.reward:.4f}, Correctness: {outs.correctness:.4f}, " + f"Raw Score: {outs.raw_score:.4f}, Msg: {outs.msg}" + ) + + metrics = self._build_metrics(outs, correct_format, message, parsed_code) + step_result = StepResult( + reward=outs.reward, + episode_done=True, + next_observation=tinker.ModelInput.empty(), + next_stop_condition=self.stop_condition, + metrics=metrics, + ) + + if outs.correctness > 0: + try: + next_state = self._create_next_state(step_idx, parsed_code, outs) + self.sampler.update_states([next_state], [self.initial_state], save=False) + except Exception as exc: + logtree.log_text(f"Failed to create next state: {exc}") + if hasattr(self.sampler, "record_failed_rollout"): + self.sampler.record_failed_rollout(self.initial_state) + elif hasattr(self.sampler, "record_failed_rollout"): + 
self.sampler.record_failed_rollout(self.initial_state) + + return step_result + def get_question(self) -> str: if self.bootstrap is None: raise RuntimeError("AutoResearchDiscoverEnv is not configured.") @@ -187,17 +231,6 @@ def check_format(self, parsed_code: str) -> bool: return True async def check_answer(self, parsed_code: str, step: int) -> VerifyResult: - if not self.check_format(parsed_code): - return VerifyResult( - reward=0.0, - msg="Invalid candidate train.py patch payload.", - correctness=0.0, - raw_score=float(self.initial_state.current_best_val_bpb), - result_construction=[], - stdout="", - metrics={"candidate_status": "invalid_candidate"}, - ) - loop = asyncio.get_running_loop() out = await loop.run_in_executor(None, self._run_reward, parsed_code) return VerifyResult( diff --git a/ttt_autoresearch/prompt_builder.py b/ttt_autoresearch/prompt_builder.py index 3ce21772..7c9e0c68 100644 --- a/ttt_autoresearch/prompt_builder.py +++ b/ttt_autoresearch/prompt_builder.py @@ -4,7 +4,7 @@ CONSTRUCTION_SECTION = ( - "You may want to start your search from the current training script shown above.\n" + "You must start your search from the current training script shown above.\n" "This is the current starting point selected by the search procedure.\n" "Preserve a working script, but do not limit yourself to tiny hyperparameter tweaks.\n" "Pursue bold, high-upside changes when they are technically coherent and likely to materially improve val_bpb.\n" @@ -16,7 +16,7 @@ "Hyperparameter tuning is allowed, but do not stop there: pursue stronger algorithmic, architectural, data-flow, attention, optimization, or systems ideas when they could deliver a step-change improvement.\n" "Prefer edits that are technically coherent and high-upside, even if they are more ambitious than simple hill-climbing.\n" "Try different algorithmic ideas, architecture changes, optimizer and schedule changes, batching changes, or other training heuristics.\n" - "Moderate increases in VRAM are 
acceptable if they lead to meaningful gains.\n" + "Minor increases in VRAM are acceptable if they lead to meaningful gains.\n" "Do not refactor unrelated code, but do make all integration edits required for the new idea to work cleanly.\n" "Unless you make a meaningful improvement in `val_bpb`, you will not be rewarded." ) @@ -28,74 +28,191 @@ def build_rollout_prompt( construction_section: str, code_section: str, ) -> str: - return f"""You are an expert machine learning researcher and systems engineer optimizing a language-model training script. + return f"""You are an expert machine learning researcher and systems engineer optimizing a single-file language-model training script. -Your task is to improve `train.py` so that it achieves a lower `val_bpb` under the fixed AutoResearch evaluation budget. +Your task is to edit `train.py` so that, when run under the fixed AutoResearch harness, it achieves a lower validation bits-per-byte (`val_bpb`). -## Problem +You are producing exactly one candidate patch to the current `train.py`. -Improve the `train.py` program so that the resulting training run achieves a lower validation bits-per-byte (`val_bpb`). +## Objective -Everything in `train.py` is fair game: +Modify `train.py` to improve final `val_bpb` under the fixed 5-minute training budget. + +Lower `val_bpb` is better. + +Your goal is not to make the code cleaner, more general, or more reusable. Your goal is to improve the measured validation result under the fixed harness while preserving a working script. 
+ +## Fixed Project Facts + +This is a simplified single-GPU AutoResearch / nanochat-style setup in which: +- `train.py` is the only editable file +- runs are compared under a fixed wall-clock budget +- the optimization target is validation bits-per-byte (`val_bpb`) +- lower `val_bpb` is the only thing that matters + +## Non-Negotiable Constraints + +Treat all of the following as fixed: +- only `train.py` may be edited +- the evaluation harness is fixed +- the dataset is fixed +- the tokenizer setup is fixed +- do not add dependencies +- do not rely on new packages or changes to environment setup +- use only Python standard library modules and packages already clearly part of this codebase + +## Fixed Training / Evaluation Facts + +- the training budget is exactly 300 seconds of wall-clock training time, excluding startup / compilation +- maximum sequence length is fixed at `2048` +- evaluation token budget is fixed at `40 * 524288` +- validation is pinned to shard `06542` +- tokenizer vocabulary size is fixed at `8192` +- the BOS token and BOS-aligned packing behavior are fixed +- `val_bpb` is vocabulary-size-independent +- results must remain comparable under the fixed harness + +## Fixed Data Pipeline Contract + +Your edited `train.py` must remain compatible with the existing pipeline. + +In particular: +- the dataloader uses BOS-aligned packing +- every row starts with BOS +- documents are packed with best-fit packing to minimize cropping +- when no document fits, the shortest buffered document is cropped to fill the row exactly +- the dataloader is designed for full utilization without padding +- `make_dataloader(tokenizer, B, T, split)` must continue to work with your script + +Do not introduce assumptions that would break BOS alignment, packed rows, or the existing input / target layout. 
+ +## Fixed Model / Evaluation Contract + +Your edited script must preserve all of the following: +- the model must continue to support `forward(x, y, reduction='none')` +- when `targets` are provided, the model must still return a loss compatible with the current evaluator +- the final summary prints must remain present +- especially preserve the line beginning exactly with `val_bpb:` +- keep `TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0` + +## What You May Change + +Everything inside `train.py` is fair game, including: - architecture - optimizer +- schedules - hyperparameters - training loop -- batch size +- batch geometry - model size - -**Lower `val_bpb` values are better** - they indicate a stronger model under the fixed evaluation budget. - -## Budget & Resources -- **Time budget**: 5 minutes of wall-clock training time on a single NVIDIA H100 GPU -- **Evaluation harness**: fixed AutoResearch runner -- **VRAM**: moderate increases are acceptable for meaningful gains, but avoid wasteful blowups - -## AutoResearch Invariants -- `prepare.py` and the evaluation protocol are fixed and cannot be changed -- Maximum sequence length is `2048` -- Validation uses the pinned shard `06542` -- The tokenizer / vocabulary setup is fixed at vocab size `8192` -- The training script must remain compatible with the existing BOS-aligned bin-packing data pipeline -- The model implementation must continue to support `forward(x, y, reduction='none')` -- Keep `TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0` -- Preserve the final summary prints, especially the line that starts with `val_bpb:` - -## Rules -- You may only edit `train.py` -- Do not modify `prepare.py`, dependencies, or the evaluation harness -- Return only one or more exact SEARCH/REPLACE edit blocks for `train.py` -- Prefer as few patch blocks as needed, but use as many exact patch blocks as necessary to make the edit apply cleanly -- Each SEARCH block must copy exact contiguous text from the 
current `train.py` -- If you change constants or a small code region, include enough surrounding context in SEARCH to make the patch unique -- Treat each SEARCH block like an exact `old_string` tool argument: it must match exactly once -- Do not return the full file -- Do not return standalone code fragments -- Do not wrap the answer in JSON -- Do not wrap the answer in markdown code fences -- Do not include any commentary, rationale, summary, or prose before or after the patch -- Do not abbreviate with `...` or placeholders; each replacement must be fully expanded source code -- Each patch block must use exactly this format: -<<<<<<< SEARCH +- attention pattern +- initialization +- regularization +- precision and memory-management choices inside `train.py` + +## Technical Guidance + +Treat `train.py` as the full research surface: it contains the model, optimizer, batching choices, and training loop. + +The fixed 5-minute budget exists so experiments remain comparable even when model size, batch size, architecture, or optimization strategy change. The target metric is `val_bpb`; lower is better. + +The most important practical knobs in this setup are: +- `DEVICE_BATCH_SIZE` +- `TOTAL_BATCH_SIZE` +- `DEPTH` +- `WINDOW_PATTERN` +- hidden width / head geometry +- optimizer and schedule choices +- activation and memory behavior inside `train.py` + +Important practical facts: +- the number of tokens per forward/backward pass is `DEVICE_BATCH_SIZE * MAX_SEQ_LEN` +- `DEPTH` is a primary model-complexity knob, and many other dimensions scale with it +- `WINDOW_PATTERN = "SSSL"` may be less efficient than `"L"` on some systems +- sequence length, dataset, vocabulary size, tokenizer, and evaluation protocol are fixed outside `train.py`, so only optimize through knobs that still live in `train.py` + +## Budget-Aware Reasoning Requirement + +Reason about the script as a 5-minute optimization problem, not as an unlimited-training problem. 
+ +Before choosing a change, consider: +- what is most likely limiting performance right now: optimization, throughput, memory, model size, or architecture +- whether the current script is undertrained, overbuilt, memory-inefficient, unstable, or throughput-limited +- whether your change increases or decreases: + - activation memory + - optimizer-state memory + - parameter memory + - step time + - tokens/sec + - total useful optimization completed within 5 minutes + +Important practical principles: +- sequence length is fixed at `2048` +- larger `DEVICE_BATCH_SIZE`, deeper models, wider models, extra residual branches, extra embedding streams, and more expensive attention patterns can all hurt memory or throughput +- if you increase one major cost driver, compensate elsewhere +- a somewhat smaller but faster or stabler model can beat a larger model that gets fewer useful steps +- avoid changes that are likely to OOM or materially reduce useful training under the fixed budget +- bold changes are allowed, but they must be coherent, dependency-safe, memory-aware, and budget-aware + +## What Good Edits Usually Look Like + +Good candidates usually do one coherent thing well: +- improve quality-per-token under a short training horizon +- improve throughput under the fixed wall-clock budget +- improve early optimization stability +- improve memory-efficiency enough to unlock a better tradeoff +- choose a more coherent scaling point across depth, width, heads, batch, and optimizer +- simplify components if the current design is overbuilt for the budget +- make a targeted architectural change with a clear expected payoff + +Prefer one coherent direction over many unrelated tweaks. 
 + +## What To Avoid + +Avoid: +- elegant changes with weak expected effect on `val_bpb` +- changes that mainly help only at much longer training horizons +- fragile edits likely to break compilation or runtime +- edits that silently violate the data pipeline or output contract +- extra complexity without a clear budget-aware reason +- memory increases without a clear speed/quality payoff +- changes to fixed knobs controlled outside `train.py` + +## Output Requirements + +Return only exact search-and-replace patch blocks for `train.py`. + +Do not return the full file. +Do not return standalone code fragments. +Do not return JSON. +Do not use markdown code fences. +Do not include commentary, rationale, summary, or any prose before or after the patch. +Do not abbreviate with `...` or placeholders. +Each replacement must be fully expanded source code. + +Each patch block must use exactly this format: + +<SEARCH> +[exact existing text from the current train.py] -======= +</SEARCH> +<REPLACE> +[new replacement text] ->>>>>>> REPLACE -- The SEARCH text must match the current starting `train.py` exactly -- Propose exactly one candidate for this rollout -- Optimize for the lowest `val_bpb` under the fixed time budget -- Prefer simpler changes when improvement is similar (large changes such as architectural or similar changes are preferred when improvement is large) - -## Example Response -<<<<<<< SEARCH -TOTAL_BATCH_SIZE = 524288 -======= -TOTAL_BATCH_SIZE = 393216 ->>>>>>> REPLACE +</REPLACE> + + +Patch rules: +- each `<SEARCH>` block must copy exact contiguous text from the current `train.py` +- each `<SEARCH>` block must match the current file exactly once +- include enough surrounding context to make each patch unique and apply cleanly +- use as few patch blocks as possible, but as many as necessary for correctness +- ensure the final result is a working script + +Optimize for the lowest `val_bpb` under the fixed evaluation budget, subject to actually running successfully. 
 + +## Current Starting Point + +Below is the current `train.py` state, current score context, target score, and previous run context: {state_ctx} -{construction_section} -{code_section} """ diff --git a/ttt_autoresearch/runner.py b/ttt_autoresearch/runner.py index 0480c430..ee5fa1a8 100644 --- a/ttt_autoresearch/runner.py +++ b/ttt_autoresearch/runner.py @@ -25,6 +25,14 @@ r"<<<<<<< SEARCH\n(.*?)\n=======\n(.*?)\n>>>>>>> REPLACE", re.DOTALL, ) +TAG_PATCH_BLOCK_RE = re.compile( + r"<SEARCH>\s*\n?(.*?)\n?\s*</SEARCH>\s*<REPLACE>\s*\n?(.*?)\n?\s*</REPLACE>", + re.DOTALL | re.IGNORECASE, +) +GPT_OSS_ANALYSIS_MARKER = "<|channel|>analysis<|message|>" +GPT_OSS_FINAL_MARKER = "<|channel|>final<|message|>" +KIMI_THINK_OPEN = "<think>" +KIMI_THINK_CLOSE = "</think>" VAL_BPB_PRINT_RE = re.compile(r"print\(\s*f?[\"']val_bpb:\s*", re.MULTILINE) FORWARD_WITH_REDUCTION_RE = re.compile(r"def\s+forward\s*\([^)]*\breduction\s*=", re.MULTILINE) _KNOWN_PREPARE_CONSTANTS = {"MAX_SEQ_LEN": 2048} @@ -82,28 +90,48 @@ def parse_patch_candidate(candidate_json: str) -> PatchCandidate: def parse_patch_candidate_for_state(candidate_json: str, current_train_py: str) -> PatchCandidate: - stripped = candidate_json.strip() + stripped = extract_patch_payload(candidate_json).strip() if not stripped: raise ValueError("Candidate must not be empty.") - updated_train_py, patch_block_count, extracted = apply_search_replace_patch(stripped, current_train_py) + updated_train_py, patch_block_count, extracted, parser_name = apply_search_replace_patch(stripped, current_train_py) lines_changed = count_lines_changed(current_train_py, updated_train_py) if lines_changed == 0: raise ValueError("Patch did not change train.py.") + candidate_format = parser_name + if extracted: + candidate_format = f"{candidate_format}_extracted" return PatchCandidate( - summary="search_replace_patch_candidate", - rationale="model returned search/replace patch", + summary=f"{parser_name}_candidate", + rationale="model returned patch edits", train_py=updated_train_py, - 
candidate_format="search_replace_patch_extracted" if extracted else "search_replace_patch", + candidate_format=candidate_format, patch_block_count=patch_block_count, lines_changed=lines_changed, ) -def apply_search_replace_patch(patch_text: str, current_train_py: str) -> tuple[str, int, bool]: - blocks = list(SEARCH_REPLACE_BLOCK_RE.finditer(patch_text)) +def extract_patch_payload(candidate_text: str) -> str: + if GPT_OSS_FINAL_MARKER in candidate_text: + final_text = candidate_text.split(GPT_OSS_FINAL_MARKER, 1)[1].strip() + if final_text: + return final_text + if KIMI_THINK_OPEN in candidate_text and KIMI_THINK_CLOSE in candidate_text: + final_text = candidate_text.split(KIMI_THINK_CLOSE, 1)[1].strip() + if final_text: + return final_text + return candidate_text + + +def apply_search_replace_patch(patch_text: str, current_train_py: str) -> tuple[str, int, bool, str]: + parser_name = "search_replace_patch" + blocks = list(TAG_PATCH_BLOCK_RE.finditer(patch_text)) + if blocks: + parser_name = "tag_patch" + else: + blocks = list(SEARCH_REPLACE_BLOCK_RE.finditer(patch_text)) if not blocks: - raise ValueError("Candidate must contain one or more SEARCH/REPLACE patch blocks.") + raise ValueError("Candidate must contain one or more valid patch blocks.") updated = current_train_py for match in blocks: @@ -119,7 +147,7 @@ def apply_search_replace_patch(patch_text: str, current_train_py: str) -> tuple[ updated = updated.replace(search_text, replace_text, 1) extracted = _has_non_block_wrapper_text(patch_text, blocks) - return updated, len(blocks), extracted + return updated, len(blocks), extracted, parser_name def count_lines_changed(previous_text: str, updated_text: str) -> int: From 86db5da4657ae8639afc2465261b694e14d4ab29 Mon Sep 17 00:00:00 2001 From: Aum Desai Date: Fri, 13 Mar 2026 11:38:38 +1100 Subject: [PATCH 17/17] Update target val_bpb and prompt test expectations --- tests/test_env_smoke.py | 19 ++++++++++--------- ttt_autoresearch/config.py | 2 +- 2 files 
changed, 11 insertions(+), 10 deletions(-) diff --git a/tests/test_env_smoke.py b/tests/test_env_smoke.py index 8fc1359a..334d09de 100644 --- a/tests/test_env_smoke.py +++ b/tests/test_env_smoke.py @@ -82,19 +82,20 @@ def test_env_prompt_and_reward_flow(self) -> None: self.assertIn("Current val_bpb (lower is better): 1.100000", prompt) self.assertIn("Target: 0.95", prompt) self.assertIn("Here is the last code we ran", prompt) - self.assertIn("## Problem", prompt) - self.assertIn("## Budget & Resources", prompt) - self.assertIn("## Rules", prompt) - self.assertIn("You may want to start your search from the current training script shown above.", prompt) - self.assertIn("This is the current starting point selected by the search procedure.", prompt) - self.assertIn("Pursue bold, high-upside changes", prompt) - self.assertIn("Reason about how you could further improve this training script under the fixed 5-minute training budget.", prompt) - self.assertIn("Hyperparameter tuning is allowed, but do not stop there", prompt) - self.assertIn("Moderate increases in VRAM are acceptable if they lead to meaningful gains.", prompt) + self.assertIn("## Objective", prompt) + self.assertIn("## Fixed Project Facts", prompt) + self.assertIn("## Non-Negotiable Constraints", prompt) + self.assertIn("## Fixed Training / Evaluation Facts", prompt) + self.assertIn("## Technical Guidance", prompt) + self.assertIn("## Output Requirements", prompt) + self.assertIn("single-file language-model training script", prompt) + self.assertIn("You are producing exactly one candidate patch", prompt) + self.assertIn("The fixed 5-minute budget exists so experiments remain comparable", prompt) self.assertNotIn("Baseline val_bpb from the original script", prompt) self.assertNotIn("LOOP FOREVER", prompt) self.assertNotIn("results.tsv", prompt) self.assertNotIn("git reset", prompt) + self.assertNotIn("prepare.py", prompt) self.assertIn("TOTAL_BATCH_SIZE % (DEVICE_BATCH_SIZE * MAX_SEQ_LEN) == 0", prompt) 
payload = "<<<<<<< SEARCH\n# val_bpb: 1.100000\n=======\n# val_bpb: 0.900000\n>>>>>>> REPLACE" self.assertTrue(env.check_format(payload)) diff --git a/ttt_autoresearch/config.py b/ttt_autoresearch/config.py index 47ff0337..6fc7f49c 100644 --- a/ttt_autoresearch/config.py +++ b/ttt_autoresearch/config.py @@ -26,7 +26,7 @@ class TTTAutoResearchConfig: model_name: str = "openai/gpt-oss-120b" provider: str | None = None api_base: str | None = None - target_val_bpb: float | None = 0.97 + target_val_bpb: float | None = 0.96 max_steps: int = 12 groups_per_step: int = 2 samples_per_step: int = 8