---
type: example
summary: Backup D1 using export API and save it on R2
tags:
  - Workflows
  - D1
  - R2
languages:
  - TypeScript
pcx_content_type: configuration
title: Backup and save D1 database
sidebar:
  order: 3
description: Back up a D1 database using the export API and save the SQL dump on R2

---

import { WranglerConfig } from "~/components";

In this example, we implement a Workflow that is periodically triggered by a [Cron Trigger](/workers/configuration/cron-triggers) to back up a D1 database using the REST API and store the resulting SQL dump in an [R2](/r2) bucket.

Once a Workflow instance is triggered, it calls the REST API to start an export job for a specific database. It then polls the same endpoint to check whether the backup job is ready and the SQL dump is available to download.

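For reference, this is roughly the shape of the `result` object the example reads from the export endpoint's responses. It is a hypothetical sketch inferred from the fields used below, not the official schema; consult the D1 REST API documentation for the authoritative definition:

```ts
// Hypothetical shape of the export endpoint's `result` field, inferred from
// the fields this example reads. Not the official API schema.
type D1ExportResult = {
  at_bookmark?: string; // token used to poll an in-progress export job
  signed_url?: string; // present once the SQL dump is ready to download
  filename?: string; // name of the generated dump file
};
```
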
Note how Workflows handles responses and failures for us: it retries the API calls until it gets a successful response, retries fetching the backup from the provided URL, and retries saving the file to [R2](/r2). The developer does not have to write any of that logic; the Workflow simply runs until the backup file is ready, handling all the intermediate states along the way.

This is a simplified example of backing up a [D1](/d1) database. Every step uses the [default](/workflows/build/sleeping-and-retrying) sleeping and retrying configuration. A real-life scenario would likely involve more steps and additional logic, but this example gives you a good idea of what you can do with Workflows.

```ts
import {
  WorkflowEntrypoint,
  WorkflowStep,
  WorkflowEvent,
} from "cloudflare:workers";

// We are using R2 to store the D1 backup
type Env = {
  BACKUP_WORKFLOW: Workflow;
  D1_REST_API_TOKEN: string;
  BACKUP_BUCKET: R2Bucket;
};

// Workflow parameters: we expect accountId and databaseId
type Params = {
  accountId: string;
  databaseId: string;
};

// Workflow logic
export class backupWorkflow extends WorkflowEntrypoint<Env, Params> {
  async run(event: WorkflowEvent<Params>, step: WorkflowStep) {
    const { accountId, databaseId } = event.payload;

    const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/d1/database/${databaseId}/export`;
    const method = "POST";
    const headers = new Headers();
    headers.append("Content-Type", "application/json");
    headers.append("Authorization", `Bearer ${this.env.D1_REST_API_TOKEN}`);

    const bookmark = await step.do(`Starting backup for ${databaseId}`, async () => {
      const payload = { output_format: "polling" };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // If we don't get `at_bookmark` we throw to retry the step
      if (!result?.at_bookmark) throw new Error("Missing `at_bookmark`");

      return result.at_bookmark;
    });

    await step.do("Check backup status and store it on R2", async () => {
      const payload = { current_bookmark: bookmark };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // The endpoint sends `signed_url` when the backup is ready to download.
      // If we don't get `signed_url` we throw to retry the step.
      if (!result?.signed_url) throw new Error("Missing `signed_url`");

      const dumpResponse = await fetch(result.signed_url);
      if (!dumpResponse.ok) throw new Error("Failed to fetch dump file");

      // Finally, stream the file directly to R2
      await this.env.BACKUP_BUCKET.put(result.filename, dumpResponse.body);
    });
  }
}

export default {
  async fetch(req: Request, env: Env): Promise<Response> {
    return new Response("Not found", { status: 404 });
  },
  async scheduled(controller: ScheduledController, env: Env, ctx: ExecutionContext) {
    const params: Params = {
      accountId: "{accountId}",
      databaseId: "{databaseId}",
    };
    const instance = await env.BACKUP_WORKFLOW.create({ params });
    console.log(`Started workflow: ${instance.id}`);
  },
};
```

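Each call to `step.do` also accepts an optional configuration object to override the default retry and timeout behavior. As a minimal sketch with illustrative values (not recommendations), the polling step above could be tuned like this:

```ts
await step.do(
  "Check backup status and store it on R2",
  {
    retries: {
      limit: 5, // retry the step up to 5 times
      delay: "5 seconds", // initial delay between attempts
      backoff: "exponential", // double the delay after each failure
    },
    timeout: "15 minutes", // fail an attempt that runs longer than this
  },
  async () => {
    // ... same step body as above ...
  },
);
```
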
Here's a minimal `package.json`:

```json
{
  "devDependencies": {
    "@cloudflare/workers-types": "^4.20241224.0",
    "wrangler": "^3.99.0"
  }
}
```

And finally, `wrangler.toml`:

<WranglerConfig>

```toml
name = "backup-d1"
main = "src/index.ts"
compatibility_date = "2024-12-27"
compatibility_flags = [ "nodejs_compat" ]

[[workflows]]
name = "backup-workflow"
binding = "BACKUP_WORKFLOW"
class_name = "backupWorkflow"

[[r2_buckets]]
binding = "BACKUP_BUCKET"
bucket_name = "d1-backups"

[triggers]
crons = [ "0 0 * * *" ]
```

</WranglerConfig>
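
The cron expression `0 0 * * *` runs the backup once a day at midnight UTC. For quick manual testing, one option is to start an instance from the Worker's `fetch` handler instead of returning a 404. A minimal sketch, reusing the `Env` type and the placeholder IDs from the example above:

```ts
export default {
  // For manual testing only: kick off a backup on demand.
  // The account and database IDs are placeholders, as in the scheduled handler.
  async fetch(req: Request, env: Env): Promise<Response> {
    const instance = await env.BACKUP_WORKFLOW.create({
      params: { accountId: "{accountId}", databaseId: "{databaseId}" },
    });
    return Response.json({ id: instance.id });
  },
};
```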