Skip to content
Open
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 7 additions & 26 deletions core/framework/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from typing import Any

from framework.graph.edge import DEFAULT_MAX_TOKENS
from framework.llm.codex_backend import CODEX_API_BASE, build_codex_litellm_kwargs

# ---------------------------------------------------------------------------
# Low-level config file access
Expand Down Expand Up @@ -125,7 +126,6 @@ def get_worker_api_key() -> str | None:
return token
except ImportError:
pass

api_key_env_var = worker_llm.get("api_key_env_var")
if api_key_env_var:
return os.environ.get(api_key_env_var)
Expand All @@ -141,7 +141,7 @@ def get_worker_api_base() -> str | None:
return get_api_base()

if worker_llm.get("use_codex_subscription"):
return "https://chatgpt.com/backend-api/codex"
return CODEX_API_BASE
if worker_llm.get("use_kimi_code_subscription"):
return "https://api.kimi.com/coding"
if worker_llm.get("use_antigravity_subscription"):
Expand Down Expand Up @@ -169,23 +169,14 @@ def get_worker_llm_extra_kwargs() -> dict[str, Any]:
if worker_llm.get("use_codex_subscription"):
api_key = get_worker_api_key()
if api_key:
headers: dict[str, str] = {
"Authorization": f"Bearer {api_key}",
"User-Agent": "CodexBar",
}
account_id = None
try:
from framework.runner.runner import get_codex_account_id

account_id = get_codex_account_id()
if account_id:
headers["ChatGPT-Account-Id"] = account_id
except ImportError:
pass
return {
"extra_headers": headers,
"store": False,
"allowed_openai_params": ["store"],
}
return build_codex_litellm_kwargs(api_key, account_id=account_id)
return {}


Expand Down Expand Up @@ -274,7 +265,6 @@ def get_api_key() -> str | None:
return token
except ImportError:
pass

# Standard env-var path (covers ZAI Code and all API-key providers)
api_key_env_var = llm.get("api_key_env_var")
if api_key_env_var:
Expand Down Expand Up @@ -380,7 +370,7 @@ def get_api_base() -> str | None:
llm = get_hive_config().get("llm", {})
if llm.get("use_codex_subscription"):
# Codex subscription routes through the ChatGPT backend, not api.openai.com.
return "https://chatgpt.com/backend-api/codex"
return CODEX_API_BASE
if llm.get("use_kimi_code_subscription"):
# Kimi Code uses an Anthropic-compatible endpoint (no /v1 suffix).
return "https://api.kimi.com/coding"
Expand Down Expand Up @@ -415,23 +405,14 @@ def get_llm_extra_kwargs() -> dict[str, Any]:
if llm.get("use_codex_subscription"):
api_key = get_api_key()
if api_key:
headers: dict[str, str] = {
"Authorization": f"Bearer {api_key}",
"User-Agent": "CodexBar",
}
account_id = None
try:
from framework.runner.runner import get_codex_account_id

account_id = get_codex_account_id()
if account_id:
headers["ChatGPT-Account-Id"] = account_id
except ImportError:
pass
return {
"extra_headers": headers,
"store": False,
"allowed_openai_params": ["store"],
}
return build_codex_litellm_kwargs(api_key, account_id=account_id)
return {}


Expand Down
20 changes: 15 additions & 5 deletions core/framework/graph/conversation.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,13 +351,15 @@ def __init__(
def system_prompt(self) -> str:
return self._system_prompt

def update_system_prompt(self, new_prompt: str) -> None:
def update_system_prompt(self, new_prompt: str, output_keys: list[str] | None = None) -> None:
"""Update the system prompt.

Used in continuous conversation mode at phase transitions to swap
Layer 3 (focus) while preserving the conversation history.
"""
self._system_prompt = new_prompt
if output_keys is not None:
self._output_keys = output_keys
self._meta_persisted = False # re-persist with new prompt

def set_current_phase(self, phase_id: str) -> None:
Expand Down Expand Up @@ -771,7 +773,7 @@ async def compact(
delete_before = recent_messages[0].seq if recent_messages else self._next_seq
await self._store.delete_parts_before(delete_before)
await self._store.write_part(summary_msg.seq, summary_msg.to_storage_dict())
await self._store.write_cursor({"next_seq": self._next_seq})
await self._write_cursor_update({"next_seq": self._next_seq})

self._messages = [summary_msg] + recent_messages
self._last_api_input_tokens = None # reset; next LLM call will recalibrate
Expand Down Expand Up @@ -975,7 +977,7 @@ async def compact_preserving_structure(
# Write kept structural messages (they may have been modified)
for msg in kept_structural:
await self._store.write_part(msg.seq, msg.to_storage_dict())
await self._store.write_cursor({"next_seq": self._next_seq})
await self._write_cursor_update({"next_seq": self._next_seq})

# Reassemble: reference + kept structural (in original order) + recent
self._messages = [ref_msg] + kept_structural + recent_messages
Expand Down Expand Up @@ -1012,7 +1014,7 @@ async def clear(self) -> None:
"""Remove all messages, keep system prompt, preserve ``_next_seq``."""
if self._store:
await self._store.delete_parts_before(self._next_seq)
await self._store.write_cursor({"next_seq": self._next_seq})
await self._write_cursor_update({"next_seq": self._next_seq})
self._messages.clear()
self._last_api_input_tokens = None

Expand Down Expand Up @@ -1047,14 +1049,22 @@ def export_summary(self) -> str:

# --- Persistence internals ---------------------------------------------

async def _write_cursor_update(self, data: dict[str, Any]) -> None:
    """Merge *data* into the persisted cursor rather than overwriting it.

    Preserves whatever crash-recovery state is already stored in the
    cursor; silently does nothing when persistence is disabled
    (``self._store`` is ``None``).
    """
    store = self._store
    if store is None:
        return
    merged = (await store.read_cursor()) or {}
    merged |= data
    await store.write_cursor(merged)

async def _persist(self, message: Message) -> None:
    """Write-through a single message; no-op when no store is configured.

    Lazily persists conversation metadata first (once per prompt change),
    then the message part itself, then merges the advanced ``next_seq``
    into the cursor.
    """
    store = self._store
    if store is None:
        return
    # Metadata is written lazily, the first time anything is persisted
    # after the prompt (or other meta) changed.
    if not self._meta_persisted:
        await self._persist_meta()
    await store.write_part(message.seq, message.to_storage_dict())
    await self._write_cursor_update({"next_seq": self._next_seq})

async def _persist_meta(self) -> None:
"""Lazily write conversation metadata to the store (called once)."""
Expand Down
Loading
Loading