diff --git a/src/hooks/think-mode/index.test.ts b/src/hooks/think-mode/index.test.ts
index e135412db4..911c4dce61 100644
--- a/src/hooks/think-mode/index.test.ts
+++ b/src/hooks/think-mode/index.test.ts
@@ -153,3 +153,50 @@ describe("createThinkModeHook", () => {
     expect(output.message.model).toBeUndefined()
   })
 })
+
+describe("think-mode: regression tests for issue #2382", () => {
+  const sessionID = "regression-2382"
+
+  beforeEach(() => {
+    clearThinkModeState(sessionID)
+  })
+
+  it("does NOT upgrade gpt-5-nano on Korean think-mode trigger text", async () => {
+    const hook = createThinkModeHook()
+    const input = createHookInput({
+      sessionID,
+      providerID: "opencode",
+      modelID: "gpt-5-nano",
+    })
+    const output = createHookOutput(
+      "너와 인공지능 엔진에게 이미지와 참조 자료를 어떻게 전달할지 고민하고 있었는데"
+    )
+
+    // when
+    await hook["chat.message"](input, output)
+
+    expect(output.message.variant).toBeUndefined()
+    expect(output.message.model).toBeUndefined()
+  })
+
+  it("still upgrades claude-sonnet-4-6 on the same Korean think-mode trigger text", async () => {
+    const hook = createThinkModeHook()
+    const input = createHookInput({
+      sessionID,
+      providerID: "anthropic",
+      modelID: "claude-sonnet-4-6",
+    })
+    const output = createHookOutput(
+      "너와 인공지능 엔진에게 이미지와 참조 자료를 어떻게 전달할지 고민하고 있었는데"
+    )
+
+    // when
+    await hook["chat.message"](input, output)
+
+    expect(output.message.variant).toBe("high")
+    expect(output.message.model).toEqual({
+      providerID: "anthropic",
+      modelID: "claude-sonnet-4-6-high",
+    })
+  })
+})
diff --git a/src/hooks/think-mode/switcher.test.ts b/src/hooks/think-mode/switcher.test.ts
index e56ec5823d..670717e0f3 100644
--- a/src/hooks/think-mode/switcher.test.ts
+++ b/src/hooks/think-mode/switcher.test.ts
@@ -69,6 +69,13 @@ describe("think-mode switcher", () => {
     expect(getHighVariant("llama-3-70b")).toBeNull()
     expect(getHighVariant("mistral-large")).toBeNull()
   })
+
+  it("should return null for gpt-5-nano (no reasoning variant on Zen)", () => {
+    // given gpt-5-nano — a lightweight model with no high variant on OpenCode Zen
+    // see: https://github.com/code-yeongyu/oh-my-openagent/issues/2382
+    expect(getHighVariant("gpt-5-nano")).toBeNull()
+    expect(getHighVariant("opencode/gpt-5-nano")).toBeNull()
+  })
 })
 })
 
diff --git a/src/hooks/think-mode/switcher.ts b/src/hooks/think-mode/switcher.ts
index f458d9b663..fe4ff28645 100644
--- a/src/hooks/think-mode/switcher.ts
+++ b/src/hooks/think-mode/switcher.ts
@@ -50,9 +50,11 @@ const HIGH_VARIANT_MAP: Record<string, string> = {
   "gemini-3-1-pro-low": "gemini-3-1-pro-high",
   "gemini-3-flash": "gemini-3-flash-high",
   // GPT-5
+  // NOTE: gpt-5-nano is intentionally excluded — it is a lightweight nano model
+  // that does not have a reasoning/high-effort variant on the OpenCode Zen provider.
+  // Mapping it to gpt-5-nano-high causes a "Model not found" error.
   "gpt-5": "gpt-5-high",
   "gpt-5-mini": "gpt-5-mini-high",
-  "gpt-5-nano": "gpt-5-nano-high",
   "gpt-5-pro": "gpt-5-pro-high",
   "gpt-5-chat-latest": "gpt-5-chat-latest-high",
   // GPT-5.1