From ad47359ec4ad3f62e63ec0f6ab69e207b7d9dbab Mon Sep 17 00:00:00 2001
From: Joaquin Gimenez
Date: Mon, 30 Dec 2024 18:05:21 -0300
Subject: [PATCH] Add Workflow example (#18961)

---
 .../docs/workflows/examples/backup-d1.mdx | 142 ++++++++++++++++++
 1 file changed, 142 insertions(+)
 create mode 100644 src/content/docs/workflows/examples/backup-d1.mdx

diff --git a/src/content/docs/workflows/examples/backup-d1.mdx b/src/content/docs/workflows/examples/backup-d1.mdx
new file mode 100644
index 00000000000000..59784f51416cfd
--- /dev/null
+++ b/src/content/docs/workflows/examples/backup-d1.mdx
@@ -0,0 +1,142 @@
---
type: example
summary: Back up a D1 database using the export API and save the dump on R2
tags:
  - Workflows
  - D1
  - R2
languages:
  - TypeScript
pcx_content_type: configuration
title: Back up and save a D1 database
sidebar:
  order: 3
description: Periodically back up a D1 database and store the SQL dump in an R2 bucket

---

import { TabItem, Tabs } from "~/components"

In this example, we implement a Workflow periodically triggered by a [Cron Trigger](/workers/configuration/cron-triggers) that starts a backup for a D1 database using the REST API and stores the resulting SQL dump in an [R2](/r2) bucket.

Once a Workflow instance is triggered, it calls the REST API to start an export job for a specific database. It then polls the same endpoint to check whether the backup job has finished and the SQL dump is available to download.

Workflows handles the responses and failures: it retries the API calls until it gets a successful response, retries fetching the backup from the URL provided, and retries saving the file to [R2](/r2). The developer does not have to implement any of that retry logic. The Workflow can run until the backup file is ready, handling all the possible conditions along the way.

This is a simplified example of backing up a [D1](/d1) database. Every step uses the [default](/workflows/build/sleeping-and-retrying) sleeping and retrying configuration. A real-life scenario would likely involve more steps and additional logic, but this example gives you a good idea of what you can do with Workflows.
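
If a step needs tighter control than the defaults, `step.do` also accepts a per-step configuration for retries and timeouts. The snippet below is a minimal sketch of what that could look like for the export step; the limit, delay, backoff, and timeout values are illustrative, not recommendations:

```ts
// Sketch only: overriding the default retry behavior for a single step.
// This would live inside the Workflow's run() method shown below.
await step.do(
  `Starting backup for ${databaseId}`,
  {
    retries: {
      limit: 5, // retry the step up to 5 times
      delay: "10 seconds", // wait between attempts
      backoff: "exponential", // increase the delay after each attempt
    },
    timeout: "5 minutes", // fail the attempt if it runs longer than this
  },
  async () => {
    // ...same export API call as in the full example below
  },
);
```

The full Workflow below sticks to the defaults:
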
```ts
import {
  WorkflowEntrypoint,
  WorkflowStep,
  WorkflowEvent,
} from "cloudflare:workers";

// We are using R2 to store the D1 backup
type Env = {
  BACKUP_WORKFLOW: Workflow;
  D1_REST_API_TOKEN: string;
  BACKUP_BUCKET: R2Bucket;
};

// Workflow parameters: we expect accountId and databaseId
type Params = {
  accountId: string;
  databaseId: string;
};

// Workflow logic
export class backupWorkflow extends WorkflowEntrypoint<Env, Params> {
  async run(event: WorkflowEvent<Params>, step: WorkflowStep) {
    const { accountId, databaseId } = event.payload;

    const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/d1/database/${databaseId}/export`;
    const method = "POST";
    const headers = new Headers();
    headers.append("Content-Type", "application/json");
    headers.append("Authorization", `Bearer ${this.env.D1_REST_API_TOKEN}`);

    const bookmark = await step.do(`Starting backup for ${databaseId}`, async () => {
      const payload = { output_format: "polling" };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // If we don't get `at_bookmark` we throw to retry the step
      if (!result?.at_bookmark) throw new Error("Missing `at_bookmark`");

      return result.at_bookmark;
    });

    await step.do("Check backup status and store it on R2", async () => {
      const payload = { current_bookmark: bookmark };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // The endpoint sends `signed_url` when the backup is ready to download.
      // If we don't get `signed_url` we throw to retry the step.
      if (!result?.signed_url) throw new Error("Missing `signed_url`");

      const dumpResponse = await fetch(result.signed_url);
      if (!dumpResponse.ok) throw new Error("Failed to fetch dump file");

      // Finally, stream the file directly to R2
      await this.env.BACKUP_BUCKET.put(result.filename, dumpResponse.body);
    });
  }
}

export default {
  async fetch(req: Request, env: Env): Promise<Response> {
    return new Response("Not found", { status: 404 });
  },
  async scheduled(controller: ScheduledController, env: Env, ctx: ExecutionContext) {
    const params: Params = {
      accountId: "{accountId}",
      databaseId: "{databaseId}",
    };
    const instance = await env.BACKUP_WORKFLOW.create({ params });
    console.log(`Started workflow: ${instance.id}`);
  },
};
```

Here's a minimal `package.json`:

```json
{
  "devDependencies": {
    "@cloudflare/workers-types": "^4.20241224.0",
    "wrangler": "^3.99.0"
  }
}
```

And finally, `wrangler.toml`:

import { WranglerConfig } from "~/components";

<WranglerConfig>

```toml
name = "backup-d1"
main = "src/index.ts"
compatibility_date = "2024-12-27"
compatibility_flags = [ "nodejs_compat" ]

[[workflows]]
name = "backup-workflow"
binding = "BACKUP_WORKFLOW"
class_name = "backupWorkflow"

[[r2_buckets]]
binding = "BACKUP_BUCKET"
bucket_name = "d1-backups"

[triggers]
crons = [ "0 0 * * *" ]
```

</WranglerConfig>
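
As a next step, you could replace the `fetch` stub to report on a running backup. The sketch below is not part of the example above: it assumes the instance ID is passed as a query parameter, and it uses the Workflows binding's `get()` method and the instance's `status()` method:

```ts
// Sketch only: look up a backup instance and return its status,
// e.g. GET /?id=<instance-id>. Error handling is kept minimal.
async fetch(req: Request, env: Env): Promise<Response> {
  const id = new URL(req.url).searchParams.get("id");
  if (!id) return new Response("Not found", { status: 404 });

  // Fetch the existing Workflow instance and read its current status
  const instance = await env.BACKUP_WORKFLOW.get(id);
  return Response.json(await instance.status());
},
```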