Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
82 changes: 68 additions & 14 deletions packages/persistence/__tests__/pg-stores.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,9 @@ import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
import pg from "pg";
import { PgLedgerStore } from "../src/pg-ledger-store.js";
import { PgTokenStore } from "../src/pg-token-store.js";
import { ensurePgSchema } from "../src/pg-schema.js";
import type { SintLedgerEvent, SintCapabilityToken } from "@pshkv/core";
import { readFileSync } from "node:fs";
import { join, dirname } from "node:path";
import { fileURLToPath } from "node:url";

const __dirname = dirname(fileURLToPath(import.meta.url));
const DATABASE_URL = process.env.DATABASE_URL;

const describeWithPg = DATABASE_URL ? describe : describe.skip;
Expand Down Expand Up @@ -56,11 +53,7 @@ describeWithPg("PgLedgerStore", () => {

beforeAll(async () => {
pool = new pg.Pool({ connectionString: DATABASE_URL });
const migrationSql = readFileSync(
join(__dirname, "../migrations/001_create_ledger.sql"),
"utf-8",
);
await pool.query(migrationSql);
await ensurePgSchema(pool);
store = new PgLedgerStore(pool);
});

Expand Down Expand Up @@ -153,11 +146,7 @@ describeWithPg("PgTokenStore", () => {

beforeAll(async () => {
pool = new pg.Pool({ connectionString: DATABASE_URL });
const migrationSql = readFileSync(
join(__dirname, "../migrations/002_create_tokens.sql"),
"utf-8",
);
await pool.query(migrationSql);
await ensurePgSchema(pool);
store = new PgTokenStore(pool);
});

Expand Down Expand Up @@ -231,4 +220,69 @@ describeWithPg("PgTokenStore", () => {
const retrieved = await store.get("tok-1");
expect(retrieved!.actions).toEqual(["publish", "subscribe"]);
});

// Regression for #169: every optional SintCapabilityToken field must
// round-trip byte-identical, otherwise canonical-JSON signatures fail
// verification after a store -> get cycle.
it("preserves all optional fields losslessly (regression for #169)", async () => {
// Start from a baseline token and widen it with every optional field
// the type has grown since the original per-column schema was written.
const token = {
...makeToken("tok-opt"),
modelConstraints: {
allowedModelIds: ["claude-opus-4-7", "claude-sonnet-4-6"],
maxModelVersion: "4.7.0",
modelFingerprintHash: "a".repeat(64),
},
attestationRequirements: {
minAttestationGrade: 2 as const,
allowedTeeBackends: ["intel-sgx", "amd-sev"] as const,
requireForTiers: ["strong"] as const,
},
verifiableComputeRequirements: {
allowedProofTypes: ["risc0-groth16"] as const,
verifierRefs: ["verifier://risc0/v1"],
maxProofAgeMs: 60_000,
requirePublicInputsHash: true,
requireForTiers: ["strong"] as const,
},
executionEnvelope: {
corridorId: "corridor-abc",
expiresAt: "2026-03-16T23:00:00.000000Z",
maxDeviationMeters: 0.5,
maxHeadingDeviationDeg: 5,
maxVelocityMps: 1.5,
maxForceNewtons: 20,
},
behavioralConstraints: {
maxCallsPerMinute: 30,
allowedPatterns: ["^safe:"],
deniedPatterns: ["rm -rf"],
maxPayloadBytes: 65536,
},
passportId: "aps:passport:xyz",
delegationDepth: 2,
revocationEndpoint: "https://revocation.example/v1/crl",
// NOTE(review): the double cast deliberately bypasses excess-property
// checking; assumes each field above exists (or will exist) on
// SintCapabilityToken in @pshkv/core — confirm against the type.
} as unknown as SintCapabilityToken;

// Round-trip through Postgres. toEqual is deep *structural* equality:
// it proves no field is dropped or reshaped, but not that the stored
// bytes are canonical-JSON identical (JSONB normalizes key order).
await store.store(token);
const retrieved = await store.get("tok-opt");
expect(retrieved).toBeDefined();
expect(retrieved).toEqual(token);
});

// The legacy `constraints` field carries arbitrarily nested objects and
// arrays (geofence polygons, time windows, quorum lists); verify the
// JSONB layer returns them structurally intact after a store -> get cycle.
it("preserves nested constraints object structure", async () => {
const token = {
...makeToken("tok-nested"),
constraints: {
maxVelocityMps: 0.5,
geofence: { coordinates: [[0, 0], [10, 0], [10, 10], [0, 10]] },
timeWindow: { start: "09:00", end: "17:00", timezone: "UTC" },
rateLimit: { maxPerMinute: 10 },
quorum: { required: 2, authorized: ["a", "b", "c"] },
},
// Cast needed because this literal is wider than the declared type.
} as unknown as SintCapabilityToken;

await store.store(token);
const retrieved = await store.get("tok-nested");
// Deep structural equality: nested arrays and objects must come back
// with the exact shape and values the store received.
expect(retrieved).toEqual(token);
});
});
60 changes: 60 additions & 0 deletions packages/persistence/migrations/003_tokens_payload.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
-- SINT Persistence: migrate sint_tokens to a payload-JSONB layout.
--
-- Rationale: SintCapabilityToken keeps gaining optional fields
-- (modelConstraints, attestationRequirements, verifiableComputeRequirements,
-- executionEnvelope, behavioralConstraints, passportId, delegationDepth,
-- revocationEndpoint). Per-field columns silently dropped any field added
-- after the initial schema, causing signature verification to fail
-- round-trip (issue #169).
--
-- New layout: the full canonical token is stored as one JSONB `payload`
-- column. A small set of scalar columns (token_id, subject, issuer,
-- resource, expires_at) is kept for indexed lookup. Adding a new optional
-- field to the type requires no schema change.
--
-- NOTE(review): JSONB does not preserve key order, duplicate keys, or
-- whitespace — assumes signature canonicalization is recomputed from the
-- parsed JSON value rather than compared against stored bytes; confirm.
--
-- Safe to run against databases that have (a) only 002 applied,
-- (b) 003 already applied (no-op), or (c) the fresh 003 layout from
-- `ensurePgSchema`.

ALTER TABLE sint_tokens
ADD COLUMN IF NOT EXISTS payload JSONB;

-- Backfill payload from legacy columns for rows where it's still NULL.
-- Guarded by the information_schema existence check below, so this whole
-- backfill is a no-op when the legacy columns have already been dropped
-- (PL/pgSQL only parses the UPDATE if the IF branch is actually taken).
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'sint_tokens' AND column_name = 'actions'
) THEN
UPDATE sint_tokens
SET payload = jsonb_build_object(
'tokenId', token_id,
'issuer', issuer,
'subject', subject,
'resource', resource,
'actions', actions,
'constraints', constraints,
'delegationChain', delegation_chain,
'issuedAt', issued_at,
'expiresAt', expires_at,
'revocable', revocable,
'signature', signature
)
WHERE payload IS NULL;
END IF;
END$$;

-- NOTE(review): SET NOT NULL errors out if any row still has a NULL
-- payload at this point (e.g. legacy columns dropped before backfill ran)
-- — that failure is loud rather than data-losing, which seems intended,
-- but confirm the rollout ordering.
ALTER TABLE sint_tokens
ALTER COLUMN payload SET NOT NULL;

-- Drop legacy columns now that their content lives in payload. token_id,
-- subject, issuer, resource, expires_at remain for indexed lookup.
ALTER TABLE sint_tokens
DROP COLUMN IF EXISTS actions,
DROP COLUMN IF EXISTS constraints,
DROP COLUMN IF EXISTS delegation_chain,
DROP COLUMN IF EXISTS issued_at,
DROP COLUMN IF EXISTS revocable,
DROP COLUMN IF EXISTS signature;
65 changes: 55 additions & 10 deletions packages/persistence/src/pg-schema.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,27 +39,72 @@ export async function ensurePgSchema(pool: pg.Pool): Promise<void> {
ON sint_ledger_events (event_type, sequence_number);
`);

// Capability tokens: the full canonical token lives in `payload` (JSONB)
// so new optional fields on SintCapabilityToken round-trip without schema
// churn. A small set of indexed scalar columns supports lookup. See #169.
await pool.query(`
CREATE TABLE IF NOT EXISTS sint_tokens (
token_id TEXT PRIMARY KEY,
issuer TEXT NOT NULL,
subject TEXT NOT NULL,
resource TEXT NOT NULL,
actions JSONB NOT NULL,
constraints JSONB NOT NULL,
delegation_chain JSONB NOT NULL,
issued_at TEXT NOT NULL,
token_id TEXT PRIMARY KEY,
issuer TEXT NOT NULL,
subject TEXT NOT NULL,
resource TEXT NOT NULL,
expires_at TEXT NOT NULL,
revocable BOOLEAN NOT NULL DEFAULT true,
signature TEXT NOT NULL
payload JSONB NOT NULL
);
`);

// Self-heal legacy installs (schema from 002) by adding payload, backfilling
// it from legacy columns, then dropping them. Matches migration 003.
await pool.query(`
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_name = 'sint_tokens' AND column_name = 'actions'
) THEN
ALTER TABLE sint_tokens ADD COLUMN IF NOT EXISTS payload JSONB;
UPDATE sint_tokens
SET payload = jsonb_build_object(
'tokenId', token_id,
'issuer', issuer,
'subject', subject,
'resource', resource,
'actions', actions,
'constraints', constraints,
'delegationChain', delegation_chain,
'issuedAt', issued_at,
'expiresAt', expires_at,
'revocable', revocable,
'signature', signature
)
WHERE payload IS NULL;
ALTER TABLE sint_tokens ALTER COLUMN payload SET NOT NULL;
ALTER TABLE sint_tokens
DROP COLUMN IF EXISTS actions,
DROP COLUMN IF EXISTS constraints,
DROP COLUMN IF EXISTS delegation_chain,
DROP COLUMN IF EXISTS issued_at,
DROP COLUMN IF EXISTS revocable,
DROP COLUMN IF EXISTS signature;
END IF;
END$$;
`);

await pool.query(`
CREATE INDEX IF NOT EXISTS idx_sint_tokens_subject
ON sint_tokens (subject);
`);

await pool.query(`
CREATE INDEX IF NOT EXISTS idx_sint_tokens_issuer
ON sint_tokens (issuer);
`);

await pool.query(`
CREATE INDEX IF NOT EXISTS idx_sint_tokens_resource
ON sint_tokens (resource);
`);

await pool.query(`
CREATE TABLE IF NOT EXISTS sint_revocations (
token_id TEXT PRIMARY KEY,
Expand Down
55 changes: 23 additions & 32 deletions packages/persistence/src/pg-token-store.ts
Original file line number Diff line number Diff line change
@@ -1,28 +1,30 @@
/**
* SINT Persistence — PostgreSQL Token Store.
*
* The full canonical token is persisted in a single JSONB `payload` column
* so new optional fields on `SintCapabilityToken` (modelConstraints,
* attestationRequirements, verifiableComputeRequirements, executionEnvelope,
* behavioralConstraints, passportId, delegationDepth, revocationEndpoint…)
* round-trip losslessly and signatures verify. A handful of scalar columns
* (token_id, subject, issuer, resource, expires_at) is denormalized for
* indexed lookup only. See #169.
*
* @module @sint/persistence/pg-token-store
*/

import type pg from "pg";
import type { SintCapabilityToken, UUIDv7 } from "@pshkv/core";
import type { TokenStore } from "./interfaces.js";

/** Map a database row to a SintCapabilityToken. */
function rowToToken(row: any): SintCapabilityToken {
return {
tokenId: row.token_id,
issuer: row.issuer,
subject: row.subject,
resource: row.resource,
actions: row.actions,
constraints: row.constraints,
delegationChain: row.delegation_chain,
issuedAt: row.issued_at,
expiresAt: row.expires_at,
revocable: row.revocable,
signature: row.signature,
};
/**
 * Map a `sint_tokens` row back to its stored `SintCapabilityToken`.
 *
 * Only the JSONB `payload` column is consulted: the scalar columns on the
 * row exist purely for indexed lookup and are ignored here. The `pg`
 * driver has already deserialized the JSONB value into a plain object.
 */
function rowToToken(row: { payload: unknown }): SintCapabilityToken {
  const { payload } = row;
  return payload as SintCapabilityToken;
}

export class PgTokenStore implements TokenStore {
Expand All @@ -31,47 +33,36 @@ export class PgTokenStore implements TokenStore {
async store(token: SintCapabilityToken): Promise<void> {
await this.pool.query(
`INSERT INTO sint_tokens
(token_id, issuer, subject, resource, actions, constraints,
delegation_chain, issued_at, expires_at, revocable, signature)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
(token_id, issuer, subject, resource, expires_at, payload)
VALUES ($1, $2, $3, $4, $5, $6)
ON CONFLICT (token_id) DO UPDATE SET
issuer = EXCLUDED.issuer,
subject = EXCLUDED.subject,
resource = EXCLUDED.resource,
actions = EXCLUDED.actions,
constraints = EXCLUDED.constraints,
delegation_chain = EXCLUDED.delegation_chain,
issued_at = EXCLUDED.issued_at,
expires_at = EXCLUDED.expires_at,
revocable = EXCLUDED.revocable,
signature = EXCLUDED.signature`,
payload = EXCLUDED.payload`,
[
token.tokenId,
token.issuer,
token.subject,
token.resource,
JSON.stringify(token.actions),
JSON.stringify(token.constraints),
JSON.stringify(token.delegationChain),
token.issuedAt,
token.expiresAt,
token.revocable,
token.signature,
JSON.stringify(token),
],
);
}

async get(tokenId: UUIDv7): Promise<SintCapabilityToken | undefined> {
const result = await this.pool.query(
"SELECT * FROM sint_tokens WHERE token_id = $1",
"SELECT payload FROM sint_tokens WHERE token_id = $1",
[tokenId],
);
return result.rows.length > 0 ? rowToToken(result.rows[0]) : undefined;
}

async getBySubject(subject: string): Promise<readonly SintCapabilityToken[]> {
const result = await this.pool.query(
"SELECT * FROM sint_tokens WHERE subject = $1",
"SELECT payload FROM sint_tokens WHERE subject = $1",
[subject],
);
return result.rows.map(rowToToken);
Expand Down
Loading