Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions drizzle/0025_site_post_refresh_probe.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- Migration 0025: per-site "post refresh probe" settings.
-- After a balance refresh, a site can optionally probe its models; these three
-- columns store the feature toggle, the model to probe, and the probe scope.
-- NOTE(review): `integer DEFAULT false` relies on SQLite accepting TRUE/FALSE
-- literals (SQLite >= 3.23) — confirm the minimum supported runtime.
-- The `--> statement-breakpoint` comments are drizzle-kit markers that split
-- this file into individually executed statements; do not edit them.
ALTER TABLE `sites` ADD `post_refresh_probe_enabled` integer DEFAULT false;
--> statement-breakpoint
-- Empty string (not NULL) means "no specific model chosen".
ALTER TABLE `sites` ADD `post_refresh_probe_model` text DEFAULT '';
--> statement-breakpoint
-- Allowed values appear to be 'single' | 'all' (see sites.ts normalization) — confirm.
ALTER TABLE `sites` ADD `post_refresh_probe_scope` text DEFAULT 'single';
1 change: 1 addition & 0 deletions drizzle/0026_site_probe_latency_threshold.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
ALTER TABLE `sites` ADD `post_refresh_probe_latency_threshold_ms` integer DEFAULT 0;
14 changes: 14 additions & 0 deletions drizzle/meta/_journal.json
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,20 @@
"when": 1775731596826,
"tag": "0024_projection_leases",
"breakpoints": true
},
{
"idx": 24,
"version": "6",
"when": 1776943800000,
"tag": "0025_site_post_refresh_probe",
"breakpoints": true
},
{
"idx": 25,
"version": "6",
"when": 1776944000000,
"tag": "0026_site_probe_latency_threshold",
"breakpoints": true
}
]
}
2 changes: 1 addition & 1 deletion src/server/db/generated/mysql.bootstrap.sql
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
CREATE TABLE IF NOT EXISTS `sites` (`id` INT AUTO_INCREMENT NOT NULL PRIMARY KEY, `name` TEXT NOT NULL, `url` TEXT NOT NULL, `platform` TEXT NOT NULL, `status` VARCHAR(191) NOT NULL DEFAULT 'active', `api_key` TEXT, `created_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `updated_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `is_pinned` BOOLEAN DEFAULT false, `sort_order` INT DEFAULT 0, `proxy_url` TEXT, `use_system_proxy` BOOLEAN DEFAULT false, `custom_headers` JSON, `external_checkin_url` TEXT, `global_weight` DOUBLE DEFAULT 1);
CREATE TABLE IF NOT EXISTS `sites` (`id` INT AUTO_INCREMENT NOT NULL PRIMARY KEY, `name` TEXT NOT NULL, `url` TEXT NOT NULL, `platform` TEXT NOT NULL, `status` VARCHAR(191) NOT NULL DEFAULT 'active', `api_key` TEXT, `created_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `updated_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `is_pinned` BOOLEAN DEFAULT false, `sort_order` INT DEFAULT 0, `proxy_url` TEXT, `use_system_proxy` BOOLEAN DEFAULT false, `custom_headers` JSON, `external_checkin_url` TEXT, `global_weight` DOUBLE DEFAULT 1, `post_refresh_probe_enabled` BOOLEAN DEFAULT false, `post_refresh_probe_model` VARCHAR(191) DEFAULT '', `post_refresh_probe_scope` VARCHAR(191) DEFAULT 'single', `post_refresh_probe_latency_threshold_ms` INT DEFAULT 0);
CREATE TABLE IF NOT EXISTS `accounts` (`id` INT AUTO_INCREMENT NOT NULL PRIMARY KEY, `site_id` INT NOT NULL, `username` TEXT, `access_token` TEXT NOT NULL, `api_token` TEXT, `balance` DOUBLE DEFAULT 0, `balance_used` DOUBLE DEFAULT 0, `quota` DOUBLE DEFAULT 0, `unit_cost` DOUBLE, `value_score` DOUBLE DEFAULT 0, `status` VARCHAR(191) DEFAULT 'active', `checkin_enabled` BOOLEAN DEFAULT true, `last_checkin_at` VARCHAR(191), `last_balance_refresh` VARCHAR(191), `extra_config` JSON, `created_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `updated_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `is_pinned` BOOLEAN DEFAULT false, `sort_order` INT DEFAULT 0, `oauth_provider` TEXT, `oauth_account_key` TEXT, `oauth_project_id` TEXT, FOREIGN KEY (`site_id`) REFERENCES `sites`(`id`) ON DELETE CASCADE);
CREATE TABLE IF NOT EXISTS `account_tokens` (`id` INT AUTO_INCREMENT NOT NULL PRIMARY KEY, `account_id` INT NOT NULL, `name` TEXT NOT NULL, `token` TEXT NOT NULL, `source` VARCHAR(191) DEFAULT 'manual', `enabled` BOOLEAN DEFAULT true, `is_default` BOOLEAN DEFAULT false, `created_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `updated_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `token_group` TEXT, `value_status` VARCHAR(191) NOT NULL DEFAULT 'ready', FOREIGN KEY (`account_id`) REFERENCES `accounts`(`id`) ON DELETE CASCADE);
CREATE TABLE IF NOT EXISTS `admin_snapshots` (`id` INT AUTO_INCREMENT NOT NULL PRIMARY KEY, `namespace` TEXT NOT NULL, `snapshot_key` TEXT NOT NULL, `payload` TEXT NOT NULL, `generated_at` VARCHAR(191) NOT NULL, `expires_at` VARCHAR(191) NOT NULL, `stale_until` VARCHAR(191) NOT NULL, `created_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')), `updated_at` VARCHAR(191) DEFAULT (DATE_FORMAT(NOW(), '%Y-%m-%d %H:%i:%s')));
Expand Down
2 changes: 1 addition & 1 deletion src/server/db/generated/postgres.bootstrap.sql
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
CREATE TABLE IF NOT EXISTS "sites" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, "name" TEXT NOT NULL, "url" TEXT NOT NULL, "platform" TEXT NOT NULL, "status" TEXT NOT NULL DEFAULT 'active', "api_key" TEXT, "created_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "updated_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "is_pinned" BOOLEAN DEFAULT false, "sort_order" INTEGER DEFAULT 0, "proxy_url" TEXT, "use_system_proxy" BOOLEAN DEFAULT false, "custom_headers" JSONB, "external_checkin_url" TEXT, "global_weight" DOUBLE PRECISION DEFAULT 1);
CREATE TABLE IF NOT EXISTS "sites" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, "name" TEXT NOT NULL, "url" TEXT NOT NULL, "platform" TEXT NOT NULL, "status" TEXT NOT NULL DEFAULT 'active', "api_key" TEXT, "created_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "updated_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "is_pinned" BOOLEAN DEFAULT false, "sort_order" INTEGER DEFAULT 0, "proxy_url" TEXT, "use_system_proxy" BOOLEAN DEFAULT false, "custom_headers" JSONB, "external_checkin_url" TEXT, "global_weight" DOUBLE PRECISION DEFAULT 1, "post_refresh_probe_enabled" BOOLEAN DEFAULT false, "post_refresh_probe_model" TEXT DEFAULT '', "post_refresh_probe_scope" TEXT DEFAULT 'single', "post_refresh_probe_latency_threshold_ms" INTEGER DEFAULT 0);
CREATE TABLE IF NOT EXISTS "accounts" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, "site_id" INTEGER NOT NULL, "username" TEXT, "access_token" TEXT NOT NULL, "api_token" TEXT, "balance" DOUBLE PRECISION DEFAULT 0, "balance_used" DOUBLE PRECISION DEFAULT 0, "quota" DOUBLE PRECISION DEFAULT 0, "unit_cost" DOUBLE PRECISION, "value_score" DOUBLE PRECISION DEFAULT 0, "status" TEXT DEFAULT 'active', "checkin_enabled" BOOLEAN DEFAULT true, "last_checkin_at" TEXT, "last_balance_refresh" TEXT, "extra_config" JSONB, "created_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "updated_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "is_pinned" BOOLEAN DEFAULT false, "sort_order" INTEGER DEFAULT 0, "oauth_provider" TEXT, "oauth_account_key" TEXT, "oauth_project_id" TEXT, FOREIGN KEY ("site_id") REFERENCES "sites"("id") ON DELETE CASCADE);
CREATE TABLE IF NOT EXISTS "account_tokens" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, "account_id" INTEGER NOT NULL, "name" TEXT NOT NULL, "token" TEXT NOT NULL, "source" TEXT DEFAULT 'manual', "enabled" BOOLEAN DEFAULT true, "is_default" BOOLEAN DEFAULT false, "created_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "updated_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "token_group" TEXT, "value_status" TEXT NOT NULL DEFAULT 'ready', FOREIGN KEY ("account_id") REFERENCES "accounts"("id") ON DELETE CASCADE);
CREATE TABLE IF NOT EXISTS "admin_snapshots" ("id" INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, "namespace" TEXT NOT NULL, "snapshot_key" TEXT NOT NULL, "payload" TEXT NOT NULL, "generated_at" TEXT NOT NULL, "expires_at" TEXT NOT NULL, "stale_until" TEXT NOT NULL, "created_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'), "updated_at" TEXT DEFAULT to_char(timezone('UTC', CURRENT_TIMESTAMP), 'YYYY-MM-DD HH24:MI:SS'));
Expand Down
24 changes: 24 additions & 0 deletions src/server/db/generated/schemaContract.json
Original file line number Diff line number Diff line change
Expand Up @@ -2089,6 +2089,30 @@
"notNull": false,
"defaultValue": "1",
"primaryKey": false
},
"post_refresh_probe_enabled": {
"logicalType": "boolean",
"notNull": false,
"defaultValue": "false",
"primaryKey": false
},
"post_refresh_probe_model": {
"logicalType": "text",
"notNull": false,
"defaultValue": "''",
"primaryKey": false
},
"post_refresh_probe_scope": {
"logicalType": "text",
"notNull": false,
"defaultValue": "'single'",
"primaryKey": false
},
"post_refresh_probe_latency_threshold_ms": {
"logicalType": "integer",
"notNull": false,
"defaultValue": "0",
"primaryKey": false
}
}
},
Expand Down
4 changes: 4 additions & 0 deletions src/server/db/schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@ export const sites = sqliteTable('sites', {
sortOrder: integer('sort_order').default(0),
globalWeight: real('global_weight').default(1),
apiKey: text('api_key'),
postRefreshProbeEnabled: integer('post_refresh_probe_enabled', { mode: 'boolean' }).default(false),
postRefreshProbeModel: text('post_refresh_probe_model').default(''),
postRefreshProbeScope: text('post_refresh_probe_scope').default('single'),
Comment on lines +18 to +20
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
set -euo pipefail

printf 'All references to the new site probe columns:\n'
rg -n 'post_refresh_probe_(enabled|model|scope)' || true

printf '\nMigration/schema artifact files containing the new columns:\n'
fd -i '(migration|snapshot|journal|schema)' --type f -x sh -c '
  file="$1"
  if rg -q "post_refresh_probe_(enabled|model|scope)" "$file"; then
    printf "\n-- %s --\n" "$file"
    rg -n "post_refresh_probe_(enabled|model|scope)" "$file"
  fi
' sh {}

Repository: cita-777/metapi

Length of output: 1015


🏁 Script executed:

#!/bin/bash
set -euo pipefail

# Find migration-related directories
echo "=== Migration and artifact directory structure ==="
fd -i '(migration|migrations|snapshot|artifact)' -type d | head -20

# Check package.json for drift-check script
echo -e "\n=== Checking for drift-check script ==="
grep -A2 -B2 'drift-check' package.json 2>/dev/null || echo "Not found in package.json"

# Look for drizzle migration files
echo -e "\n=== Looking for Drizzle-related files ==="
fd '\.sql$' --type f | head -20

# Check for schema snapshot or journal files
echo -e "\n=== Schema artifacts and snapshots ==="
fd -e '.snapshot' -o -e '.json' -o -e '.sql' --path '*schema*' --path '*artifact*' --type f | head -20

Repository: cita-777/metapi

Length of output: 297


🏁 Script executed:

#!/bin/bash
set -euo pipefail

# List all files in common Drizzle migration locations
echo "=== Checking drizzle/migrations directory ==="
if [ -d "drizzle/migrations" ]; then
  ls -la drizzle/migrations/ | head -30
  echo "---"
  ls -1 drizzle/migrations/*.sql 2>/dev/null || echo "No SQL migration files found"
fi

# Check meta directory
echo -e "\n=== Checking drizzle/meta directory ==="
if [ -d "drizzle/meta" ]; then
  ls -la drizzle/meta/ | head -30
  echo "---"
  find drizzle/meta -type f | head -20
fi

# Check db directory structure
echo -e "\n=== Checking src/server/db directory ==="
find src/server/db -type f -name '*.sql' -o -name '*migration*' -o -name '*snapshot*' 2>/dev/null | head -20

Repository: cita-777/metapi

Length of output: 1700


🏁 Script executed:

#!/bin/bash
set -euo pipefail

# Check the latest snapshot for the new columns
echo "=== Checking 0024_snapshot.json for new columns ==="
rg 'post_refresh_probe_(enabled|model|scope)' drizzle/meta/0024_snapshot.json || echo "NOT FOUND in 0024_snapshot.json"

# Check the journal
echo -e "\n=== Checking _journal.json ==="
rg 'post_refresh_probe_(enabled|model|scope)' drizzle/meta/_journal.json || echo "NOT FOUND in _journal.json"

# Check generated SQL files
echo -e "\n=== Checking generated SQL files ==="
for file in src/server/db/generated/*.sql; do
  echo "--- $file ---"
  if rg -q 'post_refresh_probe_(enabled|model|scope)' "$file"; then
    echo "FOUND"
    rg -n 'post_refresh_probe_(enabled|model|scope)' "$file" | head -5
  else
    echo "NOT FOUND"
  fi
done

Repository: cita-777/metapi

Length of output: 489


🏁 Script executed:

#!/bin/bash
set -euo pipefail

# Run drift-check if available
echo "=== Running npm run repo:drift-check ==="
if grep -q 'drift-check' package.json; then
  npm run repo:drift-check 2>&1 || echo "Drift check encountered issues"
else
  echo "No drift-check script found in package.json"
fi

Repository: cita-777/metapi

Length of output: 1354


Regenerate Drizzle migration artifacts and SQL patches for the three new schema columns.

The schema definition in src/server/db/schema.ts was updated with the three new columns, but the corresponding migration snapshot and generated SQL patches were not regenerated. The new columns are absent from:

  • drizzle/meta/0024_snapshot.json (latest snapshot)
  • drizzle/meta/_journal.json
  • src/server/db/generated/mysql.bootstrap.sql, mysql.upgrade.sql, postgres.bootstrap.sql, postgres.upgrade.sql

Database upgrades will fail without these artifacts. Regenerate them using Drizzle's migration tooling to ensure all three outputs stay synchronized (Drizzle schema + SQLite migration history + checked-in SQL patches).

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/server/db/schema.ts` around lines 18 - 20, The three new schema columns
(postRefreshProbeEnabled, postRefreshProbeModel, postRefreshProbeScope) were
added to the Drizzle schema but the migration artifacts and SQL patches were not
regenerated; run Drizzle's migration tooling to generate a new migration that
includes these columns, update the snapshot and journal (e.g.,
0024_snapshot.json and _journal.json) and regenerate the checked-in SQL patches
(mysql.bootstrap.sql, mysql.upgrade.sql, postgres.bootstrap.sql,
postgres.upgrade.sql) so the schema, snapshot, and SQL outputs stay in sync.

postRefreshProbeLatencyThresholdMs: integer('post_refresh_probe_latency_threshold_ms').default(0),
createdAt: text('created_at').default(sql`(datetime('now'))`),
updatedAt: text('updated_at').default(sql`(datetime('now'))`),
}, (table) => ({
Expand Down
7 changes: 6 additions & 1 deletion src/server/db/schemaIntrospection.ts
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,12 @@ function normalizeDefaultValueForColumn(
if (rawDefaultValue == null) return null;

let normalized = String(rawDefaultValue).trim();
if (!normalized) return null;
// MySQL stores DEFAULT '' as an empty string in information_schema.COLUMNS.COLUMN_DEFAULT.
// An empty string here is a valid empty-string literal default, not "no default".
// Encode it as two single-quotes so it round-trips through schemaContract correctly.
if (!normalized) {
return (logicalType === 'text' || logicalType === 'json') ? "''" : null;
}

normalized = normalized.replace(/^default\s+/i, '').trim();
normalized = unwrapSurroundingParentheses(normalized);
Expand Down
4 changes: 4 additions & 0 deletions src/server/routes/api/settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -702,6 +702,10 @@ function applyImportedSettingToRuntime(key: string, value: unknown) {
config.tokenRouterFailureCooldownMaxSec = normalized;
return;
}
case 'post_refresh_probe_enabled':
case 'post_refresh_probe_model':
case 'post_refresh_probe_scope':
return;
default:
return;
}
Expand Down
73 changes: 73 additions & 0 deletions src/server/routes/api/sites.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,11 @@ import {
import { getSiteInitializationPreset } from '../../../shared/siteInitializationPresets.js';
import { normalizeSiteApiEndpointBaseUrl } from '../../services/siteApiEndpointService.js';
import { analyzePrimarySiteUrl } from '../../../shared/sitePrimaryUrl.js';
import { probeSiteModels } from '../../services/modelService.js';

/**
 * Write one Server-Sent Events frame (`event:` + `data:` lines) to the raw
 * HTTP response. Any failure — a closed socket, or a payload that
 * JSON.stringify rejects — is deliberately swallowed so the probe loop keeps
 * running after the client disconnects.
 */
function sseWrite(raw: import('http').ServerResponse, event: string, data: unknown) {
  try {
    const frame = `event: ${event}\ndata: ${JSON.stringify(data)}\n\n`;
    raw.write(frame);
  } catch {
    /* ignore — client may already be gone */
  }
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

function normalizeSiteStatus(input: unknown): 'active' | 'disabled' | null {
if (input === undefined || input === null) return null;
Expand Down Expand Up @@ -683,6 +688,14 @@ export async function sitesRoutes(app: FastifyInstance) {
if (body.isPinned !== undefined) updates.isPinned = normalizedPinned;
if (body.sortOrder !== undefined) updates.sortOrder = normalizedSortOrder;
if (body.globalWeight !== undefined) updates.globalWeight = normalizedGlobalWeight;
const anyBody = body as Record<string, unknown>;
if (anyBody.postRefreshProbeEnabled !== undefined) updates.postRefreshProbeEnabled = anyBody.postRefreshProbeEnabled === true || anyBody.postRefreshProbeEnabled === 1;
if (anyBody.postRefreshProbeModel !== undefined) updates.postRefreshProbeModel = String(anyBody.postRefreshProbeModel || '').trim();
if (anyBody.postRefreshProbeScope !== undefined) updates.postRefreshProbeScope = anyBody.postRefreshProbeScope === 'all' ? 'all' : 'single';
Comment on lines +691 to +694
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Validate probe settings instead of silently coercing them.

Boolean(anyBody.postRefreshProbeEnabled) turns "false" into true, and any invalid postRefreshProbeScope overwrites the setting as 'single'. Reject invalid values or reuse the existing boolean normalizer.

Suggested fix
     const anyBody = body as Record<string, unknown>;
-    if (anyBody.postRefreshProbeEnabled !== undefined) updates.postRefreshProbeEnabled = Boolean(anyBody.postRefreshProbeEnabled);
+    if (anyBody.postRefreshProbeEnabled !== undefined) {
+      const normalizedProbeEnabled = normalizePinnedFlag(anyBody.postRefreshProbeEnabled);
+      if (normalizedProbeEnabled === null) {
+        return reply.code(400).send({ error: 'Invalid postRefreshProbeEnabled value. Expected boolean.' });
+      }
+      updates.postRefreshProbeEnabled = normalizedProbeEnabled;
+    }
     if (anyBody.postRefreshProbeModel !== undefined) updates.postRefreshProbeModel = String(anyBody.postRefreshProbeModel || '').trim();
-    if (anyBody.postRefreshProbeScope !== undefined) updates.postRefreshProbeScope = anyBody.postRefreshProbeScope === 'all' ? 'all' : 'single';
+    if (anyBody.postRefreshProbeScope !== undefined) {
+      if (anyBody.postRefreshProbeScope !== 'single' && anyBody.postRefreshProbeScope !== 'all') {
+        return reply.code(400).send({ error: 'Invalid postRefreshProbeScope value. Expected single or all.' });
+      }
+      updates.postRefreshProbeScope = anyBody.postRefreshProbeScope;
+    }
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/server/routes/api/sites.ts` around lines 691 - 694, The current logic in
the route uses loose coercion (Boolean(anyBody.postRefreshProbeEnabled) and
forcing postRefreshProbeScope to 'single' for any non-'all' value), which
accepts invalid inputs; update the handler that reads anyBody to explicitly
validate inputs: for postRefreshProbeEnabled accept only true/false booleans or
the string 'true'/'false' (reuse the existing boolean normalizer used
elsewhere), for postRefreshProbeModel trim and accept only non-empty strings,
and for postRefreshProbeScope accept only the allowed values 'all' or 'single'
(reject or return a 400 for anything else) before assigning to
updates.postRefreshProbeEnabled, updates.postRefreshProbeModel and
updates.postRefreshProbeScope so invalid values are rejected rather than
silently coerced.

if (anyBody.postRefreshProbeLatencyThresholdMs !== undefined) {
const ms = Number(anyBody.postRefreshProbeLatencyThresholdMs);
updates.postRefreshProbeLatencyThresholdMs = Number.isFinite(ms) && ms >= 0 ? Math.trunc(ms) : 0;
}
updates.updatedAt = new Date().toISOString();
try {
await db.transaction(async (tx) => {
Expand Down Expand Up @@ -887,6 +900,66 @@ export async function sitesRoutes(app: FastifyInstance) {
return { siteId: id, models };
});

// POST /api/sites/:id/probe-now — manually probe site models once, returning
// a plain JSON result (contrast with the SSE streaming variant below).
// Body fields are all optional: { scope?: 'single'|'all', modelName?: string,
// latencyThresholdMs?: number }. Unrecognized/absent values become undefined,
// which presumably lets probeSiteModels fall back to the site's stored
// settings — confirm in modelService.
app.post<{ Params: { id: string }; Body: unknown }>('/api/sites/:id/probe-now', async (request, reply) => {
const id = parseInt(request.params.id);
if (Number.isNaN(id)) {
return reply.code(400).send({ error: 'Invalid site id' });
}
// request.body may be null/absent for a bodiless POST.
const body = request.body as Record<string, unknown> | null;
// Only the two known scopes are accepted; anything else means "site default".
const scope = body?.scope === 'all' ? 'all' : body?.scope === 'single' ? 'single' : undefined;
const modelName = typeof body?.modelName === 'string' ? body.modelName.trim() : undefined;
// Threshold override must be a finite positive number; otherwise no override.
const parsedThresholdBody = Number(body?.latencyThresholdMs ?? 0);
const latencyThresholdMsBody = Number.isFinite(parsedThresholdBody) && parsedThresholdBody > 0 ? Math.trunc(parsedThresholdBody) : undefined;
const result = await probeSiteModels(id, { scope, modelName, latencyThresholdMs: latencyThresholdMsBody });
if (!result.success) {
// 422: the probe ran but reported failure (e.g. site misconfigured).
return reply.code(422).send({ error: result.error });
}
return result;
});

// GET /api/sites/:id/probe-stream — run a model probe and stream progress to
// the client as Server-Sent Events. Query params mirror the probe-now body:
// scope, modelName, latencyThresholdMs (all optional strings).
app.get<{ Params: { id: string }; Querystring: { scope?: string; modelName?: string; latencyThresholdMs?: string } }>(
'/api/sites/:id/probe-stream',
async (request, reply) => {
// Take over the raw socket from Fastify so we can write SSE frames directly.
reply.hijack();
reply.raw.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
});

// NOTE(review): headers are flushed before the id is validated, so even an
// invalid id produces HTTP 200 followed by an SSE 'error' event — clients
// must listen for the 'error' event, not the status code.
const id = parseInt(request.params.id);
if (Number.isNaN(id)) {
sseWrite(reply.raw, 'error', { message: 'Invalid site id' });
reply.raw.end();
return;
}

const q = request.query;
// Only 'all' / 'single' are recognized; anything else defers to site settings.
const scope = q.scope === 'all' ? 'all' : q.scope === 'single' ? 'single' : undefined;
const modelName = q.modelName?.trim() || undefined;
// Positive-integer threshold override only; NaN or <= 0 means no override.
const parsedThreshold = parseInt(q.latencyThresholdMs ?? '', 10);
const latencyThresholdMs = Number.isFinite(parsedThreshold) && parsedThreshold > 0 ? parsedThreshold : undefined;

// Propagate client disconnect to the probe worker pool
const probeAbort = new AbortController();
reply.raw.on('close', () => probeAbort.abort());

try {
// Each progress event is forwarded as its own SSE frame, using the probe
// event's `type` field as the SSE event name.
const result = await probeSiteModels(id, { scope, modelName, latencyThresholdMs, signal: probeAbort.signal }, (ev) => {
sseWrite(reply.raw, ev.type, ev);
});
// Skip the final 'complete' frame if the client already disconnected.
if (!probeAbort.signal.aborted) {
sseWrite(reply.raw, 'complete', result);
}
} catch (err: any) {
// Fallback message is user-facing Chinese text meaning "probe failed".
sseWrite(reply.raw, 'error', { message: err?.message || '探测失败' });
}
reply.raw.end();
},
);

// Detect platform for a URL
app.post<{ Body: unknown }>('/api/sites/detect', async (request, reply) => {
const parsedBody = parseSiteDetectPayload(request.body);
Expand Down
12 changes: 12 additions & 0 deletions src/server/services/backupService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -758,6 +758,10 @@ function buildAllApiHubV2AccountsSection(data: RawBackupData): {
sortOrder: section.sites.length,
globalWeight: 1,
apiKey: null,
postRefreshProbeEnabled: false,
postRefreshProbeModel: '',
postRefreshProbeScope: 'single',
Comment thread
coderabbitai[bot] marked this conversation as resolved.
postRefreshProbeLatencyThresholdMs: 0,
createdAt: input.createdAt,
updatedAt: input.updatedAt,
});
Expand Down Expand Up @@ -1001,6 +1005,10 @@ function buildAccountsSectionFromRefBackup(data: RawBackupData): AccountsBackupS
sortOrder: sites.length,
globalWeight: 1,
apiKey: null,
postRefreshProbeEnabled: false,
postRefreshProbeModel: '',
postRefreshProbeScope: 'single',
postRefreshProbeLatencyThresholdMs: 0,
createdAt: toIsoString(item.created_at),
updatedAt: toIsoString(item.updated_at),
});
Expand Down Expand Up @@ -1546,6 +1554,10 @@ async function importAccountsSection(section: AccountsBackupSection): Promise<vo
sortOrder: row.sortOrder ?? 0,
globalWeight: row.globalWeight ?? 1,
apiKey: row.apiKey,
postRefreshProbeEnabled: row.postRefreshProbeEnabled ?? false,
postRefreshProbeModel: row.postRefreshProbeModel ?? '',
postRefreshProbeScope: (row.postRefreshProbeScope === 'all' ? 'all' : 'single') as 'single' | 'all',
postRefreshProbeLatencyThresholdMs: row.postRefreshProbeLatencyThresholdMs ?? 0,
createdAt: row.createdAt,
updatedAt: row.updatedAt,
}).run();
Expand Down
Loading
Loading