5 changes: 5 additions & 0 deletions netlify.toml
@@ -58,6 +58,11 @@
to = "/.netlify/functions/mcp-context"
status = 200

[[redirects]]
from = "/api/reference-context"
to = "/.netlify/functions/reference-context"
status = 200

# Swagger UI
[[redirects]]
from = "/docs"
193 changes: 193 additions & 0 deletions netlify/functions/reference-context.js
@@ -0,0 +1,193 @@
const { json, error, options, listBlobs, blobUrl, blobHeaders } = require("./helpers");

const STORE = "reference-context";

function authenticate(event) {
const auth = event.headers["authorization"] || event.headers["Authorization"] || "";
const token = auth.startsWith("Bearer ") ? auth.slice(7) : "";
if (!token || token !== process.env.NETLIFY_AUTH_TOKEN) {
return error("Unauthorized — provide Authorization: Bearer <NETLIFY_AUTH_TOKEN>", 401);
}
return null;
}

/**
* PUT /api/reference-context?name=my-dataset.zip&chunk=0&total=10
*
* For small files (< 5MB), omit chunk/total and upload in one shot.
* For large files, split into chunks and upload each with chunk index + total.
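*
* A minimal single-shot sketch (file name and content type are illustrative; host
* taken from the project's OpenAPI example):
*
*   curl -X PUT "https://route-runtime-service.netlify.app/api/reference-context?name=notes.txt" \
*     -H "Authorization: Bearer $NETLIFY_AUTH_TOKEN" \
*     -H "Content-Type: text/plain" \
*     --data-binary @notes.txt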
*/
async function handlePut(event) {
const name = event.queryStringParameters?.name;
if (!name) return error("'name' query parameter is required", 400);

const body = event.body;
if (!body) return error("Request body is required", 400);

const chunk = event.queryStringParameters?.chunk;
const total = event.queryStringParameters?.total;
const contentType = event.headers["content-type"] || event.headers["Content-Type"] || "application/octet-stream";

// Netlify base64-encodes binary request bodies; decode before writing to the blob store
const storeBody = event.isBase64Encoded ? Buffer.from(body, "base64") : body;

if (chunk != null && total != null) {
// Chunked upload — store each chunk separately
const chunkIdx = parseInt(chunk, 10);
const totalChunks = parseInt(total, 10);
if (isNaN(chunkIdx) || isNaN(totalChunks) || chunkIdx < 0 || chunkIdx >= totalChunks) {
return error("'chunk' must be 0..total-1 and 'total' must be a positive integer", 400);
}

// Zero-pad the index so lexicographic blob listing matches numeric chunk order
const paddedChunk = String(chunkIdx).padStart(6, "0");
const chunkKey = encodeURIComponent(`${name}/_chunks/${paddedChunk}`);

const resp = await fetch(blobUrl(STORE, chunkKey), {
method: "PUT",
headers: { ...blobHeaders(), "Content-Type": contentType },
body: storeBody,
});
if (!resp.ok) return error(`Failed to store chunk ${chunkIdx}: ${resp.status}`, 500);

// Store metadata so we know how to reassemble
const metaKey = encodeURIComponent(`${name}/_meta`);
const meta = {
name,
total_chunks: totalChunks,
content_type: contentType,
updated_at: new Date().toISOString(),
};
const metaResp = await fetch(blobUrl(STORE, metaKey), {
method: "PUT",
headers: { ...blobHeaders(), "Content-Type": "application/json" },
body: JSON.stringify(meta),
});
if (!metaResp.ok) return error(`Failed to store metadata: ${metaResp.status}`, 500);

return json({ message: `Chunk ${chunkIdx}/${totalChunks} stored`, name, chunk: chunkIdx, total_chunks: totalChunks }, 201);
}

// Single-shot upload (no chunking)
const key = encodeURIComponent(name);
const resp = await fetch(blobUrl(STORE, key), {
method: "PUT",
headers: { ...blobHeaders(), "Content-Type": contentType },
body: storeBody,
});
if (!resp.ok) return error(`Failed to store file: ${resp.status}`, 500);

return json({ message: "File stored", name, content_type: contentType }, 201);
}

/**
* GET /api/reference-context
* — no params: list all stored references
* — ?name=foo.zip: return metadata + download info for that file
* — ?name=foo.zip&chunk=3: return a specific chunk's raw bytes
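*
* A retrieval sketch (host from the OpenAPI example; file name illustrative):
*
*   curl -H "Authorization: Bearer $NETLIFY_AUTH_TOKEN" \
*     "https://route-runtime-service.netlify.app/api/reference-context?name=foo.zip&chunk=3" \
*     -o chunk_3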
*/
async function handleGet(event) {
const name = event.queryStringParameters?.name;
const chunk = event.queryStringParameters?.chunk;

if (!name) {
// List all references
const blobs = await listBlobs(STORE);
const entries = blobs.map((b) => decodeURIComponent(b.key));

// Group by top-level name, filtering out internal chunk/meta keys
const files = new Map();
for (const key of entries) {
const topName = key.split("/_chunks/")[0].split("/_meta")[0];
if (!files.has(topName)) files.set(topName, { name: topName, chunked: false });
if (key.includes("/_chunks/") || key.includes("/_meta")) files.get(topName).chunked = true;
}

// Flatten the grouped map into the response list (single-shot uploads appear with chunked: false)
const result = [];
for (const [, info] of files) {
result.push({ name: info.name, chunked: info.chunked });
}

return json({ count: result.length, files: result });
}

// Return specific chunk raw data
if (chunk != null) {
const paddedChunk = String(parseInt(chunk, 10)).padStart(6, "0");
const chunkKey = encodeURIComponent(`${name}/_chunks/${paddedChunk}`);
const resp = await fetch(blobUrl(STORE, chunkKey), { headers: blobHeaders() });
if (!resp.ok) return error(`Chunk ${chunk} not found for '${name}'`, 404);

const buf = Buffer.from(await resp.arrayBuffer());
const ct = resp.headers.get("content-type") || "application/octet-stream";

return {
statusCode: 200,
headers: {
"Content-Type": ct,
"Access-Control-Allow-Origin": "*",
},
body: buf.toString("base64"),
isBase64Encoded: true,
};
}

// Check if it's a chunked upload (has _meta)
const metaKey = encodeURIComponent(`${name}/_meta`);
const metaResp = await fetch(blobUrl(STORE, metaKey), { headers: blobHeaders() });

if (metaResp.ok) {
const meta = await metaResp.json();

// Count how many chunks are actually stored
const blobs = await listBlobs(STORE);
const chunkPrefix = `${name}/_chunks/`;
const uploadedChunks = blobs
.map((b) => decodeURIComponent(b.key))
.filter((k) => k.startsWith(chunkPrefix))
.sort();

return json({
name,
chunked: true,
total_chunks: meta.total_chunks,
uploaded_chunks: uploadedChunks.length,
complete: uploadedChunks.length === meta.total_chunks,
content_type: meta.content_type,
updated_at: meta.updated_at,
});
}

// Single-shot file — return it directly
const key = encodeURIComponent(name);
const resp = await fetch(blobUrl(STORE, key), { headers: blobHeaders() });
if (!resp.ok) return error(`File '${name}' not found`, 404);

const buf = Buffer.from(await resp.arrayBuffer());
const ct = resp.headers.get("content-type") || "application/octet-stream";

return {
statusCode: 200,
headers: {
"Content-Type": ct,
"Access-Control-Allow-Origin": "*",
},
body: buf.toString("base64"),
isBase64Encoded: true,
};
}

exports.handler = async (event) => {
if (event.httpMethod === "OPTIONS") return options();

const authError = authenticate(event);
if (authError) return authError;

try {
if (event.httpMethod === "PUT") return await handlePut(event);
if (event.httpMethod === "GET") return await handleGet(event);
return error("Method not allowed", 405);
} catch (e) {
return error(`Failed to process reference context: ${e.message}`);
}
};
168 changes: 168 additions & 0 deletions public/swagger/openapi.yaml
@@ -219,6 +219,112 @@ paths:
schema:
$ref: "#/components/schemas/Error"

/reference-context:
get:
summary: List or retrieve reference files
description: |
Without parameters, lists all stored reference files.
With `name`, returns metadata for a chunked upload or the raw file for single-shot uploads.
With `name` + `chunk`, returns the raw bytes of a specific chunk.
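
To download and reassemble a chunked file (a sketch; assumes `jq` is installed and the upload is complete):
```
TOTAL=$(curl -s -H "Authorization: Bearer $TOKEN" \
"https://route-runtime-service.netlify.app/api/reference-context?name=data.zip" | jq -r .total_chunks)
rm -f data.zip
for i in $(seq 0 $((TOTAL - 1))); do
curl -s -H "Authorization: Bearer $TOKEN" \
"https://route-runtime-service.netlify.app/api/reference-context?name=data.zip&chunk=$i" >> data.zip
done
```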
operationId: getReferenceContext
tags: [Reference]
parameters:
- name: name
in: query
required: false
description: "File name (e.g. my-dataset.zip)"
schema:
type: string
- name: chunk
in: query
required: false
description: "Chunk index to retrieve (0-based). Requires `name`."
schema:
type: integer
responses:
"200":
description: File listing, metadata, or raw file content
content:
application/json:
schema:
oneOf:
- $ref: "#/components/schemas/ReferenceContextListResponse"
- $ref: "#/components/schemas/ReferenceContextMetaResponse"
application/octet-stream:
schema:
type: string
format: binary
"404":
description: File or chunk not found
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
put:
summary: Upload a reference file (single or chunked)
description: |
Stores any file in the reference-context blob store. Accepts any content type.

**Single-shot upload** (files < 5MB): just provide `name`.

**Chunked upload** (large files): split your file into chunks and upload each
with `name`, `chunk` (0-based index), and `total` (total number of chunks).
Each chunk should be ≤ 5MB to stay within Netlify's request body limit.
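
One way to produce the chunk files (assumes GNU coreutils `split`; `-d -a 1` yields the single-digit numeric suffixes the loop below expects):
```
split -b 5M -d -a 1 data.zip chunk_
```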

Example for a 50MB zip split into 10 chunks:
```
for i in $(seq 0 9); do
curl -X PUT \
"https://route-runtime-service.netlify.app/api/reference-context?name=data.zip&chunk=$i&total=10" \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/zip" \
--data-binary @chunk_$i
done
```
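
After uploading, a GET with just `name` reports `uploaded_chunks` and `complete`, which clients can use to verify all chunks arrived before reassembling.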
operationId: putReferenceContext
tags: [Reference]
parameters:
- name: name
in: query
required: true
description: "File name / identifier"
schema:
type: string
example: "my-dataset.zip"
- name: chunk
in: query
required: false
description: "Chunk index (0-based). Required for chunked uploads."
schema:
type: integer
example: 0
- name: total
in: query
required: false
description: "Total number of chunks. Required for chunked uploads."
schema:
type: integer
example: 10
requestBody:
required: true
content:
application/octet-stream:
schema:
type: string
format: binary
description: Raw file bytes (any content type accepted)
responses:
"201":
description: File or chunk stored successfully
content:
application/json:
schema:
$ref: "#/components/schemas/ReferenceContextPutResponse"
"400":
description: Missing or invalid parameters
content:
application/json:
schema:
$ref: "#/components/schemas/Error"

components:
schemas:
Error:
@@ -547,6 +653,68 @@ components:
type: string
description: All oncall entries across services (when listing all)

ReferenceContextListResponse:
type: object
properties:
count:
type: integer
example: 3
files:
type: array
items:
type: object
properties:
name:
type: string
example: my-dataset.zip
chunked:
type: boolean
example: true

ReferenceContextMetaResponse:
type: object
properties:
name:
type: string
example: my-dataset.zip
chunked:
type: boolean
example: true
total_chunks:
type: integer
example: 10
uploaded_chunks:
type: integer
example: 7
complete:
type: boolean
example: false
content_type:
type: string
example: application/zip
updated_at:
type: string
format: date-time

ReferenceContextPutResponse:
type: object
properties:
message:
type: string
example: "Chunk 0/10 stored"
name:
type: string
example: my-dataset.zip
chunk:
type: integer
example: 0
total_chunks:
type: integer
example: 10
content_type:
type: string
example: application/zip

SnapshotResponse:
type: object
properties: