diff --git a/.gitignore b/.gitignore index 50d76cc..7966262 100644 --- a/.gitignore +++ b/.gitignore @@ -181,4 +181,5 @@ db.sqlite3 .winterspec db.backup.sqlite3 .bin -.fly.toml.swp \ No newline at end of file +.fly.toml.swp +bun.lock diff --git a/lib/openai.ts b/lib/openai.ts new file mode 100644 index 0000000..e43dafe --- /dev/null +++ b/lib/openai.ts @@ -0,0 +1,16 @@ +import OpenAI from "openai" + +export const getOpenAiClient = () => { + const apiKey = process.env.OPENAI_API_KEY + + if (!apiKey) { + throw new Error("Missing OPENAI_API_KEY environment variable") + } + + const baseURL = process.env.OPENAI_BASE_URL?.trim() + + return new OpenAI({ + apiKey, + baseURL: baseURL && baseURL.length > 0 ? baseURL : undefined, + }) +} diff --git a/package.json b/package.json index b1295ad..955da55 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,9 @@ "type": "module", "dependencies": { "@tscircuit/footprinter": "^0.0.143", + "circuit-json": "^0.0.274", "kysely-bun-sqlite": "^0.3.2", + "openai": "^6.2.0", "react": "^18.3.1", "react-dom": "^18.3.1", "redaxios": "^0.5.1" diff --git a/routes/search.ts b/routes/search.ts new file mode 100644 index 0000000..eb29d6c --- /dev/null +++ b/routes/search.ts @@ -0,0 +1,306 @@ +import { readFile } from "node:fs/promises" +import { withWinterSpec } from "lib/with-winter-spec" +import { z } from "zod" +import { getOpenAiClient } from "lib/openai" + +type OpenAPISpec = { + paths: Record< + string, + { + get?: { + parameters?: Array<{ + name: string + in: string + required?: boolean + }> + } + } + > +} + +type EndpointSummary = { + method: "GET" + queryParams: Map<string, { required: boolean }> +} + +let cachedOpenApiSummary: { + prompt: string + endpoints: Map<string, EndpointSummary> +} | null = null + +const loadOpenApiSummary = async () => { + if (cachedOpenApiSummary) { + return cachedOpenApiSummary + } + + const openapiUrl = new URL("../docs/openapi.json", import.meta.url) + const spec = JSON.parse(await readFile(openapiUrl, "utf-8")) as OpenAPISpec + + const lines: string[] = [] 
+ const endpoints = new Map<string, EndpointSummary>() + + for (const [path, item] of Object.entries(spec.paths)) { + if (path === "/search" || path === "/api/search") continue + if (path.includes("{")) continue + if (path.includes("[")) continue + const getOperation = item.get + if (!getOperation) continue + + const queryParams = new Map<string, { required: boolean }>() + const params = getOperation.parameters ?? [] + + for (const param of params) { + if (param.in !== "query") continue + queryParams.set(param.name, { + required: Boolean(param.required), + }) + } + + const queryDescription = + queryParams.size > 0 + ? ` query: ${Array.from(queryParams.entries()) + .map(([name, meta]) => `${name}${meta.required ? "!" : ""}`) + .join(", ")}` + : "" + + lines.push(`GET ${path}${queryDescription}`.trim()) + endpoints.set(path, { + method: "GET", + queryParams, + }) + } + + const promptLines = [ + "Available endpoints (GET only):", + ...lines, + 'Respond with JSON: { "endpoint": string, "params": object }.', + "Only choose from the endpoints listed above.", + ] + + cachedOpenApiSummary = { + prompt: promptLines.join("\n"), + endpoints, + } + + return cachedOpenApiSummary +} + +const parseJsonObject = (raw: string) => { + const trimmed = raw.trim() + try { + return JSON.parse(trimmed) + } catch (error) { + const start = trimmed.indexOf("{") + const end = trimmed.lastIndexOf("}") + if (start === -1 || end === -1 || end <= start) { + throw error + } + const substring = trimmed.slice(start, end + 1) + return JSON.parse(substring) + } +} + +const ensureParamsObject = (value: unknown) => { + if (value == null) return {} + if (typeof value !== "object" || Array.isArray(value)) { + throw new Error("params must be an object") + } + return value as Record<string, unknown> +} + +export default withWinterSpec({ + auth: "none", + methods: ["GET"], + queryParams: z.object({ + q: z.string().min(1), + }), + jsonResponse: z.object({ + search_result: z.object({ + components: z.unknown(), + endpoint_used: z.string(), + filter_params: z.record(z.unknown()), + 
}), + }), +} as const)(async (req, ctx) => { + const query = req.query.q?.trim() + + if (!query) { + return ctx.error(400, { + error_code: "missing_query", + message: "Query parameter q is required", + }) + } + + const { prompt, endpoints } = await loadOpenApiSummary() + + let openaiClient: ReturnType<typeof getOpenAiClient> + + try { + openaiClient = getOpenAiClient() + } catch (error) { + return ctx.error(500, { + error_code: "missing_openai_api_key", + message: "OPENAI_API_KEY environment variable is not configured", + }) + } + + const completion = await openaiClient.chat.completions.create({ + model: "gpt-4o-mini", + temperature: 0, + messages: [ + { + role: "system", + content: + "You plan API requests for the jlcsearch service. Return valid JSON only.", + }, + { + role: "user", + content: `${prompt}\n\nUser query: ${query}`, + }, + ], + }) + + const message = completion.choices[0]?.message + const messageContent = message?.content + let content = "" + + if (typeof messageContent === "string") { + content = messageContent + } else if (Array.isArray(messageContent)) { + content = (messageContent as Array<unknown>) + .map((part: unknown) => { + if (typeof part === "string") return part + if ( + part && + typeof part === "object" && + "type" in part && + (part as { type: string }).type === "text" + ) { + const textPart = part as { text?: string } + return textPart.text ?? 
"" + } + return "" + }) + .join("") + } + + content = content.trim() + + if (!content) { + return ctx.error(502, { + error_code: "empty_llm_response", + message: "OpenAI did not return a response", + }) + } + + let parsed: { endpoint?: string; params?: Record } + + try { + parsed = parseJsonObject(content) + } catch (error) { + return ctx.error(502, { + error_code: "invalid_llm_response", + message: "Failed to parse OpenAI response", + }) + } + + const endpointUsed = parsed.endpoint + + if (!endpointUsed) { + return ctx.error(400, { + error_code: "missing_endpoint", + message: "OpenAI response did not include an endpoint", + }) + } + + const endpointMeta = endpoints.get(endpointUsed) + + if (!endpointMeta) { + return ctx.error(400, { + error_code: "invalid_endpoint", + message: `Endpoint ${endpointUsed} is not allowed`, + }) + } + + let paramsObject: Record + try { + paramsObject = ensureParamsObject(parsed.params) + } catch (error) { + return ctx.error(400, { + error_code: "invalid_params", + message: "OpenAI response params must be an object", + }) + } + + const sanitizedParams: Record = {} + + for (const [name, meta] of endpointMeta.queryParams.entries()) { + const rawValue = paramsObject[name] + + if (rawValue == null) { + if (meta.required) { + return ctx.error(400, { + error_code: "missing_required_param", + message: `Missing required parameter ${name}`, + }) + } + continue + } + + sanitizedParams[name] = String(rawValue) + } + + for (const key of Object.keys(paramsObject)) { + if (!endpointMeta.queryParams.has(key)) { + return ctx.error(400, { + error_code: "unexpected_param", + message: `Parameter ${key} is not supported for ${endpointUsed}`, + }) + } + } + + const url = new URL(req.url) + url.pathname = endpointUsed + url.search = new URLSearchParams(sanitizedParams).toString() + + let internalResponse: Response + + try { + internalResponse = await fetch(url.toString(), { + headers: { + Accept: "application/json", + }, + }) + } catch (error) { + return 
ctx.error(502, { + error_code: "internal_request_failed", + message: "Failed to call internal endpoint", + }) + } + + if (!internalResponse.ok) { + return ctx.error(internalResponse.status, { + error_code: "internal_request_failed", + message: `Internal endpoint responded with status ${internalResponse.status}`, + }) + } + + let data: any + + try { + data = await internalResponse.json() + } catch (error) { + return ctx.error(502, { + error_code: "invalid_internal_response", + message: "Internal endpoint did not return JSON", + }) + } + + return ctx.json({ + search_result: { + components: data?.components ?? data ?? null, + endpoint_used: endpointUsed, + filter_params: sanitizedParams, + }, + }) +}) diff --git a/tests/routes/search.test.ts b/tests/routes/search.test.ts new file mode 100644 index 0000000..8fdf561 --- /dev/null +++ b/tests/routes/search.test.ts @@ -0,0 +1,118 @@ +import { beforeEach, expect, test } from "bun:test" +import { getTestServer } from "tests/fixtures/get-test-server" + +const originalEnv = { + OPENAI_API_KEY: process.env.OPENAI_API_KEY, + OPENAI_BASE_URL: process.env.OPENAI_BASE_URL, +} + +let openAiRequests: Array<{ url: string; body: any }> + +beforeEach(() => { + openAiRequests = [] + + const port = 4100 + Math.floor(Math.random() * 2000) + const server = Bun.serve({ + port, + async fetch(req) { + const url = new URL(req.url) + + if (url.pathname === "/v1/chat/completions") { + const body = await req.json() + openAiRequests.push({ url: url.pathname, body }) + + return Response.json({ + id: "chatcmpl-test", + object: "chat.completion", + created: Date.now() / 1000, + model: body.model ?? 
"gpt-4o-mini", + choices: [ + { + index: 0, + finish_reason: "stop", + message: { + role: "assistant", + content: JSON.stringify({ + endpoint: "/leds/list", + params: { json: true }, + }), + }, + }, + ], + }) + } + + return new Response("not found", { status: 404 }) + }, + }) + + const previousKey = process.env.OPENAI_API_KEY + const previousBaseUrl = process.env.OPENAI_BASE_URL + + process.env.OPENAI_API_KEY = "test-openai-key" + process.env.OPENAI_BASE_URL = `http://127.0.0.1:${port}/v1` + + globalThis.deferredCleanupFns ??= [] + globalThis.deferredCleanupFns.push(async () => { + if (previousKey === undefined && originalEnv.OPENAI_API_KEY === undefined) { + delete process.env.OPENAI_API_KEY + } else { + process.env.OPENAI_API_KEY = + previousKey ?? originalEnv.OPENAI_API_KEY ?? process.env.OPENAI_API_KEY + } + + if ( + previousBaseUrl === undefined && + originalEnv.OPENAI_BASE_URL === undefined + ) { + delete process.env.OPENAI_BASE_URL + } else { + process.env.OPENAI_BASE_URL = + previousBaseUrl ?? + originalEnv.OPENAI_BASE_URL ?? 
+ process.env.OPENAI_BASE_URL + } + + await server.stop() + }) +}) + +test("GET /search delegates to internal endpoints returned by OpenAI", async () => { + const { axios } = await getTestServer() + + const response = await axios.get("/search?q=leds") + + expect(response.status).toBe(200) + expect(response.data.search_result.endpoint_used).toBe("/leds/list") + expect(response.data.search_result.filter_params).toEqual({ json: "true" }) + expect(openAiRequests.length).toBe(1) + + const [{ body }] = openAiRequests + + expect(body.messages?.[1]?.content).toContain("GET /leds/list") + expect(body.messages?.[1]?.content).toContain("User query: leds") +}) + +test("GET /search returns error when OPENAI_API_KEY is missing", async () => { + delete process.env.OPENAI_API_KEY + + const { axios } = await getTestServer() + + let response: { status: number; data: any } | undefined + + try { + response = await axios.get("/search?q=test") + } catch (error) { + response = error as { status: number; data: any } + } + + expect(response?.status).toBe(500) + expect(response?.data).toEqual({ + error: { + error_code: "missing_openai_api_key", + message: "OPENAI_API_KEY environment variable is not configured", + }, + }) + + expect(openAiRequests.length).toBe(0) +})