---
type: example
summary: Back up D1 using the export API and save the dump on R2
tags:
  - Workflows
  - D1
  - R2
languages:
  - TypeScript
pcx_content_type: configuration
title: Backup and save D1 database
sidebar:
  order: 3
description: Back up a D1 database on a schedule using the D1 export API and store the SQL dump in R2
---

import { WranglerConfig } from "~/components";

In this example, we implement a Workflow that is triggered by a Cron Trigger to back up your [D1](/d1) database and store the SQL dump in an R2 bucket.

Once a Workflow instance is triggered, it calls the D1 REST API export endpoint with `output_format: "polling"`. The first step retries until the response contains an `at_bookmark` token identifying the export job. The second step then polls the same endpoint with `current_bookmark` until the dump is ready and the response contains a `signed_url`, downloads the SQL file from that URL, and streams it directly into the R2 bucket.

This is a simplified example of backing up a database. We would assume more steps and additional logic in a real-life scenario, such as validating the dump or pruning old backups, but this example gives you a good idea of what you can do with Workflows.

Workflows handles the different service responses and failures for us: any step whose callback throws is retried according to its retry configuration, so the Workflow keeps asking D1 until the export has started and until the dump is ready. The developer doesn't have to write any of that retry logic, and the instance can run for as long as the export takes.
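
If you want tighter control over how often and how long a step retries, `step.do` accepts an optional config object between the name and the callback. A minimal sketch, with illustrative values (this goes inside `run`; the full example below relies on the defaults):

```ts
// Illustrative only: tune retries and timeout for the polling step.
const bookmark = await step.do(
  "Starting backup",
  {
    retries: { limit: 10, delay: "5 seconds", backoff: "exponential" },
    timeout: "10 minutes",
  },
  async () => {
    // ...start the export and return `at_bookmark`,
    // throwing until the API returns it
    return "bookmark";
  },
);
```

The full implementation follows: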

```ts
import {
  WorkflowEntrypoint,
  WorkflowStep,
  WorkflowEvent,
} from "cloudflare:workers";

// We are using R2 to store the D1 backup
type Env = {
  BACKUP_WORKFLOW: Workflow;
  D1_REST_API_TOKEN: string;
  BACKUP_BUCKET: R2Bucket;
};

// Workflow parameters: we expect accountId and databaseId
type Params = {
  accountId: string;
  databaseId: string;
};

// Workflow logic
export class backupWorkflow extends WorkflowEntrypoint<Env, Params> {
  async run(event: WorkflowEvent<Params>, step: WorkflowStep) {
    const { accountId, databaseId } = event.payload;

    const url = `https://api.cloudflare.com/client/v4/accounts/${accountId}/d1/database/${databaseId}/export`;
    const method = "POST";
    const headers = new Headers();
    headers.append("Content-Type", "application/json");
    headers.append("Authorization", `Bearer ${this.env.D1_REST_API_TOKEN}`);

    const bookmark = await step.do(`Starting backup for ${databaseId}`, async () => {
      const payload = { output_format: "polling" };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // If we don't get `at_bookmark` we throw to retry the step
      if (!result?.at_bookmark) throw new Error("Missing `at_bookmark`");

      return result.at_bookmark;
    });

    await step.do("Check backup status and store it on R2", async () => {
      const payload = { current_bookmark: bookmark };

      const res = await fetch(url, { method, headers, body: JSON.stringify(payload) });
      const { result } = (await res.json()) as any;

      // The endpoint sends `signed_url` when the backup is ready to download.
      // If we don't get `signed_url` we throw to retry the step.
      if (!result?.signed_url) throw new Error("Missing `signed_url`");

      const dumpResponse = await fetch(result.signed_url);
      if (!dumpResponse.ok) throw new Error("Failed to fetch dump file");

      // Finally, stream the file directly to R2
      await this.env.BACKUP_BUCKET.put(result.filename, dumpResponse.body);
    });
  }
}

export default {
  async fetch(req: Request, env: Env): Promise<Response> {
    return new Response("Not found", { status: 404 });
  },
  async scheduled(controller: ScheduledController, env: Env, ctx: ExecutionContext) {
    const params: Params = {
      accountId: "{accountId}",
      databaseId: "{databaseId}",
    };
    const instance = await env.BACKUP_WORKFLOW.create({ params });
    console.log(`Started workflow: ${instance.id}`);
  },
};
```
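
In this example, instances are only created by the Cron Trigger, so the `fetch` handler just returns a 404. As a minimal sketch, assuming you also want to start and inspect backups over HTTP, the handler could be extended like this (the routes and the inline `{accountId}`/`{databaseId}` placeholders are illustrative, not part of the example):

```ts
// Illustrative sketch: start a backup and check instance status over HTTP.
// (Keep the `scheduled` handler from the full example above.)
export default {
  async fetch(req: Request, env: Env): Promise<Response> {
    const url = new URL(req.url);

    // POST /backup creates a new Workflow instance
    if (req.method === "POST" && url.pathname === "/backup") {
      const instance = await env.BACKUP_WORKFLOW.create({
        params: { accountId: "{accountId}", databaseId: "{databaseId}" },
      });
      return Response.json({ id: instance.id });
    }

    // GET /backup/<id> reports the status of an existing instance
    const id = url.pathname.match(/^\/backup\/([^/]+)$/)?.[1];
    if (req.method === "GET" && id) {
      const instance = await env.BACKUP_WORKFLOW.get(id);
      return Response.json(await instance.status());
    }

    return new Response("Not found", { status: 404 });
  },
};
```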

Here is a minimal `package.json`:

```json
{
  "devDependencies": {
    "@cloudflare/workers-types": "^4.20241224.0",
    "wrangler": "^3.99.0"
  }
}
```

And finally, the Wrangler configuration. Note that `D1_REST_API_TOKEN` is not defined here: it is a Cloudflare API token with access to D1 that you should store as a Worker secret, for example with `npx wrangler secret put D1_REST_API_TOKEN`. The `crons` trigger runs the backup daily at midnight UTC; during local development you can exercise it with `npx wrangler dev --test-scheduled` and a request to `http://localhost:8787/__scheduled?cron=0+0+*+*+*`.

<WranglerConfig>

```toml
name = "backup-d1"
main = "src/index.ts"
compatibility_date = "2024-12-27"
compatibility_flags = [ "nodejs_compat" ]

[[workflows]]
name = "backup-workflow"
binding = "BACKUP_WORKFLOW"
class_name = "backupWorkflow"

[[r2_buckets]]
binding = "BACKUP_BUCKET"
bucket_name = "d1-backups"

[triggers]
crons = [ "0 0 * * *" ]
```

</WranglerConfig>
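
To check that backups are accumulating, you could list the bucket's contents from any Worker that has the same `BACKUP_BUCKET` binding. A small illustrative sketch:

```ts
// Illustrative: list stored backups, newest first.
const listed = await env.BACKUP_BUCKET.list();
const backups = listed.objects
  .sort((a, b) => b.uploaded.getTime() - a.uploaded.getTime())
  .map((obj) => ({ key: obj.key, uploaded: obj.uploaded.toISOString(), size: obj.size }));
console.log(backups);
```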