diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..a2c41a0 --- /dev/null +++ b/.env.example @@ -0,0 +1,16 @@ +# === Payments (x402 on Base) === +PAYMENTS_RECEIVABLE_ADDRESS= # Your USDC wallet address on Base Mainnet +FACILITATOR_URL=https://facilitator.daydreams.systems # x402 facilitator endpoint +NETWORK=base # Blockchain network (base = Base Mainnet) + +# === API Keys === +BRAVE_API_KEY= # Brave Search API key (https://api.search.brave.com) +OPENAI_API_KEY= # OpenAI API key for GPT-4o-mini synthesis + +# === Server === +PORT=3000 # HTTP port (Railway sets this automatically) +CACHE_TTL_SECONDS=300 # In-memory cache TTL in seconds (default: 5 min) + +# === Optional === +# PRIVATE_KEY= # Agent wallet private key (for outbound payments) +# LOG_LEVEL=info # Logging level (debug|info|warn|error) diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d77474a --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +bun.lock diff --git a/DEPLOY.md b/DEPLOY.md new file mode 100644 index 0000000..2a01aa7 --- /dev/null +++ b/DEPLOY.md @@ -0,0 +1,72 @@ +# Deploying Queryx on Railway + +## Prerequisites +- [Railway CLI](https://docs.railway.app/guides/cli) installed +- A Railway account +- Environment variables ready (see `.env.example`) + +## Quick Deploy + +### 1. Login & Init +```bash +railway login +railway init # creates a new project +railway link # or link to existing project +``` + +### 2. Set Environment Variables +Via Railway dashboard → your project → Variables tab, set: + +| Variable | Required | Description | +|----------|----------|-------------| +| `PAYMENTS_RECEIVABLE_ADDRESS` | ✅ | USDC wallet on Base | +| `FACILITATOR_URL` | ✅ | `https://facilitator.daydreams.systems` | +| `NETWORK` | ✅ | `base` | +| `BRAVE_API_KEY` | ✅ | Brave Search API key | +| `OPENAI_API_KEY` | ✅ | OpenAI API key | +| `PORT` | ❌ | Auto-set by Railway | +| `CACHE_TTL_SECONDS` | ❌ | Default: 300 | + +Or via CLI: +```bash +railway variables set BRAVE_API_KEY=your_key +railway variables set OPENAI_API_KEY=your_key +railway variables set PAYMENTS_RECEIVABLE_ADDRESS=0x... +railway variables set FACILITATOR_URL=https://facilitator.daydreams.systems +railway variables set NETWORK=base +``` + +### 3. Deploy +```bash +railway up +``` + +Railway auto-detects the `Dockerfile` and builds. + +### 4. Custom Domain (Optional) +1. Railway dashboard → Settings → Domains +2. Add custom domain: `queryx.run` +3. Add CNAME record pointing to Railway's domain +4. Wait for SSL provisioning (~2 min) + +### 5. Verify +```bash +# Health check +curl https://your-app.up.railway.app/health + +# Should return 402 (no payment) +curl https://your-app.up.railway.app/v1/search?q=test + +# Run smoke test +./scripts/smoke-test.sh https://your-app.up.railway.app +``` + +## Local Docker Test +```bash +docker build -t queryx . +docker run -p 3000:3000 --env-file .env queryx +curl http://localhost:3000/health +``` + +## CI +GitHub Actions runs on every push to `main` and on PRs. See `.github/workflows/ci.yml`. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..bee5b1c --- /dev/null +++ b/Dockerfile @@ -0,0 +1,9 @@ +FROM oven/bun:1 AS base +WORKDIR /app +COPY package.json bun.lock* ./ +RUN bun install --frozen-lockfile || bun install +COPY . . +EXPOSE 3000 +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s \ + CMD curl -f http://localhost:3000/health || exit 1 +CMD ["bun", "run", "src/index.ts"] diff --git a/bun.lock b/bun.lock deleted file mode 100644 index 70b0f21..0000000 --- a/bun.lock +++ /dev/null @@ -1,21 +0,0 @@ -{ - "lockfileVersion": 1, - "configVersion": 1, - "workspaces": { - "": { - "name": "queryx", - "devDependencies": { - "@types/bun": "latest", - }, - }, - }, - "packages": { - "@types/bun": ["@types/bun@1.3.9", "", { "dependencies": { "bun-types": "1.3.9" } }, "sha512-KQ571yULOdWJiMH+RIWIOZ7B2RXQGpL1YQrBtLIV3FqDcCu6FsbFUBwhdKUlCKUpS3PJDsHlJ1QKlpxoVR+xtw=="], - - "@types/node": ["@types/node@25.3.2", "", { "dependencies": { "undici-types": "~7.18.0" } }, "sha512-RpV6r/ij22zRRdyBPcxDeKAzH43phWVKEjL2iksqo1Vz3CuBUrgmPpPhALKiRfU7OMCmeeO9vECBMsV0hMTG8Q=="], - - "bun-types": ["bun-types@1.3.9", "", { "dependencies": { "@types/node": "*" } }, "sha512-+UBWWOakIP4Tswh0Bt0QD0alpTY8cb5hvgiYeWCMet9YukHbzuruIEeXC2D7nMJPB12kbh8C7XJykSexEqGKJg=="], - - "undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="], - } -} diff --git a/package.json b/package.json index d5cd7f0..401ab01 100644 --- a/package.json +++ b/package.json @@ -8,5 +8,14 @@ }, "devDependencies": { "@types/bun": "latest" + }, + "dependencies": { + "@lucid-agents/core": "^2.5.0", + "@lucid-agents/hono": "^0.9.6", + "@lucid-agents/http": "^1.10.2", + "@lucid-agents/payments": "^2.5.0", + "@lucid-agents/wallet": "^0.6.2", + "hono": "^4.12.3", + "zod": "^3.25.0-beta.20250519T094321" } } diff --git a/railway.json b/railway.json new file mode 100644 index 0000000..e1c0d59 --- /dev/null +++ b/railway.json @@ -0,0 +1,10 @@ +{ + "$schema": "https://railway.app/railway.schema.json", + "build": { "builder": "DOCKERFILE" }, + "deploy": { + "healthcheckPath": "/health", + "healthcheckTimeout": 10, + "restartPolicyType": "ON_FAILURE", + "restartPolicyMaxRetries": 3 + } +} diff --git a/scripts/smoke-test.sh b/scripts/smoke-test.sh new file mode 100755 index 0000000..c127a9d --- /dev/null +++ b/scripts/smoke-test.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# Queryx post-deploy smoke test +# Usage: ./scripts/smoke-test.sh [BASE_URL] + +set -euo pipefail + +BASE_URL="${1:-http://localhost:3000}" +PASS=0 +FAIL=0 + +check() { + local desc="$1" expected="$2" actual="$3" + if [ "$expected" = "$actual" ]; then + echo "✅ $desc (got $actual)" + ((PASS++)) + else + echo "❌ $desc (expected $expected, got $actual)" + ((FAIL++)) + fi +} + +echo "🔍 Smoke testing $BASE_URL" +echo "---" + +# 1. Health returns 200 +STATUS=$(curl -s -o /dev/null -w '%{http_code}' "$BASE_URL/health") +check "/health returns 200" "200" "$STATUS" + +# 2. Health body has status: ok +BODY=$(curl -s "$BASE_URL/health") +if echo "$BODY" | grep -q '"status":"ok"'; then + echo "✅ /health body contains status:ok" + ((PASS++)) +else + echo "❌ /health body missing status:ok — got: $BODY" + ((FAIL++)) +fi + +# 3. Search returns 402 without payment +STATUS=$(curl -s -o /dev/null -w '%{http_code}' "$BASE_URL/v1/search?q=test") +check "/v1/search returns 402 without payment" "402" "$STATUS" + +# 4. News returns 402 +STATUS=$(curl -s -o /dev/null -w '%{http_code}' "$BASE_URL/v1/search/news?q=test") +check "/v1/search/news returns 402 without payment" "402" "$STATUS" + +# 5. Deep returns 402 +STATUS=$(curl -s -o /dev/null -w '%{http_code}' -X POST -H 'Content-Type: application/json' -d '{"query":"test"}' "$BASE_URL/v1/search/deep") +check "/v1/search/deep returns 402 without payment" "402" "$STATUS" + +echo "---" +echo "Results: $PASS passed, $FAIL failed" + +[ "$FAIL" -eq 0 ] && exit 0 || exit 1 diff --git a/src/agent.ts b/src/agent.ts new file mode 100644 index 0000000..33770fc --- /dev/null +++ b/src/agent.ts @@ -0,0 +1,58 @@ +/** + * Queryx Lucid Agent — x402 paid API with TDD. + * Core agent setup with extensions and entrypoints. + */ +import { createAgent } from "@lucid-agents/core"; +import { payments, paymentsFromEnv } from "@lucid-agents/payments"; +import { z } from "zod"; +import { + SearchQuerySchema, + DeepSearchBodySchema, + SearchResponseSchema, +} from "./schemas"; + +const NETWORK = process.env.NETWORK || "base"; + +const paymentsConfig = paymentsFromEnv({ + network: NETWORK, +}); + +const paymentsExt = payments({ config: paymentsConfig }); + +export const runtime = await createAgent({ + name: "queryx", + url: `http://localhost:${process.env.PORT || 3000}`, + version: "0.1.0", + description: + "AI-powered web search agent accepting x402 USDC micropayments on Base.", + capabilities: { + streaming: false, + pushNotifications: false, + }, +}) + .use(paymentsExt) + .addEntrypoint({ + key: "search", + title: "Web Search", + description: "Web search + AI synthesis", + input: SearchQuerySchema, + output: SearchResponseSchema, + invoke: { price: { amount: "0.001", currency: "USDC" } }, + }) + .addEntrypoint({ + key: "search-news", + title: "News Search", + description: "News-focused search + AI synthesis", + input: SearchQuerySchema, + output: SearchResponseSchema, + invoke: { price: { amount: "0.001", currency: "USDC" } }, + }) + .addEntrypoint({ + key: "search-deep", + title: "Deep Research", + description: "Multi-source deep research + AI synthesis", + input: DeepSearchBodySchema, + output: SearchResponseSchema, + invoke: { price: { amount: "0.005", currency: "USDC" } }, + }) + .build(); diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 0000000..1f4b825 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,37 @@ +/** + * Queryx server entrypoint. + * Uses Lucid Agents Hono adapter with x402 payment middleware. + */ +import { createAgentApp } from "@lucid-agents/hono"; +import { Hono } from "hono"; +import { runtime } from "./agent"; +import searchRoute from "./routes/search"; +import searchNewsRoute from "./routes/search-news"; +import searchDeepRoute from "./routes/search-deep"; + +const startTime = Date.now(); + +const { app } = await createAgentApp(runtime, { + afterMount(honoApp: Hono) { + // Mount custom routes after agent routes + honoApp.route("/v1/search/news", searchNewsRoute); + honoApp.route("/v1/search/deep", searchDeepRoute); + honoApp.route("/v1/search", searchRoute); + + // Health endpoint (free, no payment required) + honoApp.get("/health", (c) => + c.json({ + status: "ok" as const, + version: "0.1.0", + uptime: Math.floor((Date.now() - startTime) / 1000), + }) + ); + }, +}); + +const port = Number(process.env.PORT || 3000); + +export default { + port, + fetch: app.fetch, +}; diff --git a/src/logic/search.ts b/src/logic/search.ts new file mode 100644 index 0000000..1a104e3 --- /dev/null +++ b/src/logic/search.ts @@ -0,0 +1,84 @@ +/** + * Query handling + source coordination. + * Orchestrates brave search, ranking, synthesis, and caching. + */ +import { braveSearch, type SearchResult, type BraveSearchOptions } from "./brave"; +import { rank as rankAndDeduplicate } from "./rank"; +import { synthesise as synthesize } from "./synth"; +import { Cache } from "./cache"; +import type { SearchResponse } from "../schemas"; + +const cache = new Cache( + Number(process.env.CACHE_TTL_SECONDS || 300) * 1000 +); + + +export function normalizeQuery(q: string): string { + return q.trim().toLowerCase().replace(/\s+/g, " "); +} + +function computeResultsAge(sources: SearchResult[]): string { + if (!sources.length) return "unknown"; + const now = Date.now(); + const published = sources + .filter((s) => s.published) + .map((s) => new Date(s.published!).getTime()) + .filter((t) => !isNaN(t)); + if (!published.length) return "unknown"; + const newest = Math.max(...published); + const diffMs = now - newest; + const hours = Math.round(diffMs / 3600000); + if (hours < 1) return "<1h"; + if (hours < 24) return `${hours}h`; + return `${Math.round(hours / 24)}d`; +} + +export interface SearchOptions { + type?: "web" | "news"; + count?: number; + deep?: boolean; +} + +export async function search( + query: string, + options: SearchOptions = {} +): Promise { + const normalized = normalizeQuery(query); + const cacheKey = `${options.type || "web"}:${options.deep ? "deep:" : ""}${normalized}`; + + const cached = cache.get(cacheKey); + if (cached) return cached.value; + + const braveOpts: BraveSearchOptions = { + count: options.count || 5, + type: options.type || "web", + }; + if (options.type === "news") braveOpts.freshness = "day"; + + const rawResults = await braveSearch(normalized, braveOpts); + const ranked = rankAndDeduplicate(rawResults); + const synthResult = await synthesize(normalized, ranked); + + const response: SearchResponse = { + query, + answer: synthResult.answer, + sources: ranked.map((r) => ({ + title: r.title, + url: r.url, + snippet: r.snippet, + ...(r.published ? { published: r.published } : {}), + })), + confidence: synthResult.confidence, + freshness: { + fetchedAt: new Date().toISOString(), + resultsAge: computeResultsAge(ranked), + }, + model: synthResult.model, + tokens: synthResult.tokens, + }; + + cache.set(cacheKey, response); + return response; +} + +export { cache }; diff --git a/src/routes/search-deep.ts b/src/routes/search-deep.ts new file mode 100644 index 0000000..5ea24bf --- /dev/null +++ b/src/routes/search-deep.ts @@ -0,0 +1,32 @@ +/** + * POST /v1/search/deep — multi-source deep research + */ +import { Hono } from "hono"; +import { DeepSearchBodySchema } from "../schemas"; +import { search } from "../logic/search"; + +const app = new Hono(); + +app.post("/", async (c) => { + const body = await c.req.json().catch(() => null); + if (!body) { + return c.json( + { error: "Invalid JSON body", code: "INVALID_BODY", status: 400 }, + 400 + ); + } + const parsed = DeepSearchBodySchema.safeParse(body); + if (!parsed.success) { + return c.json( + { error: "Invalid request", code: "INVALID_BODY", status: 400 }, + 400 + ); + } + const result = await search(parsed.data.query, { + deep: true, + count: parsed.data.sources, + }); + return c.json(result); +}); + +export default app; diff --git a/src/routes/search-news.ts b/src/routes/search-news.ts new file mode 100644 index 0000000..7c84250 --- /dev/null +++ b/src/routes/search-news.ts @@ -0,0 +1,28 @@ +/** + * GET /v1/search/news — news-focused search + */ +import { Hono } from "hono"; +import { SearchQuerySchema } from "../schemas"; +import { search } from "../logic/search"; + +const app = new Hono(); + +app.get("/", async (c) => { + const parsed = SearchQuerySchema.safeParse({ + q: c.req.query("q"), + count: c.req.query("count"), + }); + if (!parsed.success) { + return c.json( + { error: "Invalid query", code: "INVALID_QUERY", status: 400 }, + 400 + ); + } + const result = await search(parsed.data.q, { + type: "news", + count: parsed.data.count, + }); + return c.json(result); +}); + +export default app; diff --git a/src/routes/search.ts b/src/routes/search.ts new file mode 100644 index 0000000..467f684 --- /dev/null +++ b/src/routes/search.ts @@ -0,0 +1,25 @@ +/** + * GET /v1/search — web search + AI synthesis + */ +import { Hono } from "hono"; +import { SearchQuerySchema } from "../schemas"; +import { search } from "../logic/search"; + +const app = new Hono(); + +app.get("/", async (c) => { + const parsed = SearchQuerySchema.safeParse({ + q: c.req.query("q"), + count: c.req.query("count"), + }); + if (!parsed.success) { + return c.json( + { error: "Invalid query", code: "INVALID_QUERY", status: 400 }, + 400 + ); + } + const result = await search(parsed.data.q, { count: parsed.data.count }); + return c.json(result); +}); + +export default app; diff --git a/src/schemas/index.ts b/src/schemas/index.ts new file mode 100644 index 0000000..3caa3f9 --- /dev/null +++ b/src/schemas/index.ts @@ -0,0 +1,57 @@ +/** + * Zod v4 schemas for all Queryx API endpoints. + */ +import { z } from "zod"; + +export const SourceSchema = z.object({ + title: z.string(), + url: z.string().url(), + snippet: z.string(), + published: z.string().optional(), +}); + +export const FreshnessSchema = z.object({ + fetchedAt: z.string(), + resultsAge: z.string(), +}); + +export const TokensSchema = z.object({ + in: z.number().int().nonnegative(), + out: z.number().int().nonnegative(), +}); + +export const SearchResponseSchema = z.object({ + query: z.string(), + answer: z.string(), + sources: z.array(SourceSchema), + confidence: z.number().min(0).max(1), + freshness: FreshnessSchema, + model: z.string(), + tokens: TokensSchema, +}); + +export const SearchQuerySchema = z.object({ + q: z.string().min(1), + count: z.coerce.number().int().min(1).max(20).optional().default(5), +}); + +export const DeepSearchBodySchema = z.object({ + query: z.string().min(1), + sources: z.number().int().min(1).max(10).optional().default(5), +}); + +export const ErrorSchema = z.object({ + error: z.string(), + code: z.string(), + status: z.number(), +}); + +export const HealthSchema = z.object({ + status: z.literal("ok"), + version: z.string(), + uptime: z.number(), +}); + +export type SearchResponse = z.infer; +export type Source = z.infer; +export type ErrorResponse = z.infer; diff --git a/tests/contract/schemas.test.ts b/tests/contract/schemas.test.ts new file mode 100644 index 0000000..9fed958 --- /dev/null +++ b/tests/contract/schemas.test.ts @@ -0,0 +1,136 @@ +/** + * Contract tests — all request/response schemas, error envelopes. + */ +import { describe, test, expect } from "bun:test"; +import { + SearchResponseSchema, + SearchQuerySchema, + DeepSearchBodySchema, + ErrorSchema, + HealthSchema, + SourceSchema, +} from "../../src/schemas"; + +describe("SearchQuerySchema", () => { + test("accepts valid query", () => { + const result = SearchQuerySchema.safeParse({ q: "test query" }); + expect(result.success).toBe(true); + }); + + test("rejects empty query", () => { + const result = SearchQuerySchema.safeParse({ q: "" }); + expect(result.success).toBe(false); + }); + + test("defaults count to 5", () => { + const result = SearchQuerySchema.parse({ q: "test" }); + expect(result.count).toBe(5); + }); + + test("coerces string count", () => { + const result = SearchQuerySchema.parse({ q: "test", count: "3" }); + expect(result.count).toBe(3); + }); +}); + +describe("DeepSearchBodySchema", () => { + test("accepts valid body", () => { + const result = DeepSearchBodySchema.safeParse({ query: "deep test" }); + expect(result.success).toBe(true); + }); + + test("defaults sources to 5", () => { + const result = DeepSearchBodySchema.parse({ query: "test" }); + expect(result.sources).toBe(5); + }); + + test("rejects sources > 10", () => { + const result = DeepSearchBodySchema.safeParse({ query: "t", sources: 15 }); + expect(result.success).toBe(false); + }); +}); + +describe("SearchResponseSchema", () => { + const validResponse = { + query: "test", + answer: "This is an answer.", + sources: [ + { title: "Source 1", url: "https://example.com", snippet: "snippet" }, + ], + confidence: 0.87, + freshness: { fetchedAt: "2026-02-27T10:00:00Z", resultsAge: "4h" }, + model: "queryx-fast-v1", + tokens: { in: 312, out: 187 }, + }; + + test("accepts valid response", () => { + const result = SearchResponseSchema.safeParse(validResponse); + expect(result.success).toBe(true); + }); + + test("rejects confidence > 1", () => { + const result = SearchResponseSchema.safeParse({ + ...validResponse, + confidence: 1.5, + }); + expect(result.success).toBe(false); + }); + + test("rejects missing freshness", () => { + const { freshness, ...rest } = validResponse; + const result = SearchResponseSchema.safeParse(rest); + expect(result.success).toBe(false); + }); +}); + +describe("SourceSchema", () => { + test("accepts source with published date", () => { + const result = SourceSchema.safeParse({ + title: "Test", + url: "https://example.com", + snippet: "A snippet", + published: "2026-02-27T10:00:00Z", + }); + expect(result.success).toBe(true); + }); + + test("accepts source without published date", () => { + const result = SourceSchema.safeParse({ + title: "Test", + url: "https://example.com", + snippet: "A snippet", + }); + expect(result.success).toBe(true); + }); +}); + +describe("ErrorSchema", () => { + test("accepts valid error", () => { + const result = ErrorSchema.safeParse({ + error: "Not found", + code: "NOT_FOUND", + status: 404, + }); + expect(result.success).toBe(true); + }); +}); + +describe("HealthSchema", () => { + test("accepts valid health", () => { + const result = HealthSchema.safeParse({ + status: "ok", + version: "0.1.0", + uptime: 3600, + }); + expect(result.success).toBe(true); + }); + + test("rejects wrong status literal", () => { + const result = HealthSchema.safeParse({ + status: "error", + version: "0.1.0", + uptime: 0, + }); + expect(result.success).toBe(false); + }); +}); diff --git a/tests/freshness/cache-freshness.test.ts b/tests/freshness/cache-freshness.test.ts new file mode 100644 index 0000000..0442475 --- /dev/null +++ b/tests/freshness/cache-freshness.test.ts @@ -0,0 +1,50 @@ +/** + * Freshness/cache tests — TTL behaviour, staleness metadata. + * Note: Cache constructor takes ttlSeconds (multiplied by 1000 internally). + */ +import { describe, test, expect } from "bun:test"; +import { Cache } from "../../src/logic/cache"; + +describe("Cache TTL", () => { + test("returns cached value within TTL", () => { + const cache = new Cache(60); // 60 seconds + cache.set("key", "value"); + const result = cache.get("key"); + expect(result).not.toBeNull(); + expect(result!.value).toBe("value"); + expect(result!.stale).toBe(false); + }); + + test("returns null after TTL expires", async () => { + const cache = new Cache(0.001); // 1ms = 0.001 seconds + cache.set("key", "value"); + await new Promise((r) => setTimeout(r, 20)); + expect(cache.get("key")).toBeNull(); + }); + + test("tracks hit/miss stats", () => { + const cache = new Cache(60); + cache.set("a", "1"); + cache.get("a"); // hit + cache.get("b"); // miss + const s = cache.stats(); + expect(s.hits).toBe(1); + expect(s.misses).toBe(1); + }); + + test("clear empties cache", () => { + const cache = new Cache(60); + cache.set("a", "1"); + cache.set("b", "2"); + cache.clear(); + expect(cache.get("a")).toBeNull(); + expect(cache.stats().size).toBe(0); + }); + + test("normalizeKey produces stable keys", () => { + expect(Cache.normalizeKey("Hello World")).toBe("hello world"); + expect(Cache.normalizeKey("test", { b: "2", a: "1" })).toBe( + "test|a=1&b=2" + ); + }); +}); diff --git a/tests/integration/endpoints.test.ts b/tests/integration/endpoints.test.ts new file mode 100644 index 0000000..a6b4c9f --- /dev/null +++ b/tests/integration/endpoints.test.ts @@ -0,0 +1,52 @@ +/** + * Integration tests — endpoint responses, error handling. + * Tests route handling without the payment middleware (unit-level integration). + */ +import { describe, test, expect } from "bun:test"; +import searchRoute from "../../src/routes/search"; +import searchNewsRoute from "../../src/routes/search-news"; +import searchDeepRoute from "../../src/routes/search-deep"; + +describe("GET /v1/search", () => { + test("returns 400 for missing query", async () => { + const req = new Request("http://localhost/"); + const res = await searchRoute.fetch(req); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.code).toBe("INVALID_QUERY"); + }); +}); + +describe("GET /v1/search/news", () => { + test("returns 400 for missing query", async () => { + const req = new Request("http://localhost/"); + const res = await searchNewsRoute.fetch(req); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.code).toBe("INVALID_QUERY"); + }); +}); + +describe("POST /v1/search/deep", () => { + test("returns 400 for invalid JSON", async () => { + const req = new Request("http://localhost/", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: "not json", + }); + const res = await searchDeepRoute.fetch(req); + expect(res.status).toBe(400); + const body = await res.json(); + expect(body.code).toBe("INVALID_BODY"); + }); + + test("returns 400 for missing query field", async () => { + const req = new Request("http://localhost/", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ sources: 3 }), + }); + const res = await searchDeepRoute.fetch(req); + expect(res.status).toBe(400); + }); +}); diff --git a/tests/logic/search-logic.test.ts b/tests/logic/search-logic.test.ts new file mode 100644 index 0000000..95102d3 --- /dev/null +++ b/tests/logic/search-logic.test.ts @@ -0,0 +1,27 @@ +/** + * Logic tests — query normalization, search coordination. + */ +import { describe, test, expect } from "bun:test"; +import { normalizeQuery } from "../../src/logic/search"; + +describe("normalizeQuery", () => { + test("trims whitespace", () => { + expect(normalizeQuery(" hello ")).toBe("hello"); + }); + + test("lowercases", () => { + expect(normalizeQuery("Hello World")).toBe("hello world"); + }); + + test("collapses multiple spaces", () => { + expect(normalizeQuery("hello world")).toBe("hello world"); + }); + + test("handles mixed", () => { + expect(normalizeQuery(" Hello WORLD ")).toBe("hello world"); + }); + + test("empty string stays empty", () => { + expect(normalizeQuery("")).toBe(""); + }); +});