Skip to content

Commit dffcd9f

Browse files
committed
v0.2.0: Add --limit option to add LIMIT to each SELECT
Also add formatting/Prettier.
1 parent 0707da9 commit dffcd9f

File tree

8 files changed

+136
-58
lines changed

8 files changed

+136
-58
lines changed

.prettierignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
pnpm-lock.yaml

CHANGELOG.md

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,12 @@
1+
## v0.2.0 - 2023-11-06
2+
3+
- Add `--limit` option to add LIMIT to each SELECT query to fix D1
4+
`Isolate Has exceeded Memory Size`. Defaults to 1000.
5+
16
## v0.1.1 - 2023-10-10
27

3-
- Fix running script with `npx` (missing shebang)
8+
- Fix running script with `npx` (missing shebang).
49

510
## v0.1.0 - 2023-10-10
611

7-
- Initial release
12+
- Initial release.

README.md

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,9 @@ the
66
to query for table definitions and data, then outputs SQL commands to recreate
77
the database as-is.
88

9-
This script has only been tested on small databases (~700KB). Please report any bugs
10-
using [GitHub Issues](https://github.com/Cretezy/cloudflare-d1-backup/issues).
9+
This script has only been tested on small databases (~700KB). Please report any
10+
bugs using
11+
[GitHub Issues](https://github.com/Cretezy/cloudflare-d1-backup/issues).
1112

1213
Based on
1314
[nora-soderlund/cloudflare-d1-backups](https://github.com/nora-soderlund/cloudflare-d1-backups),
@@ -36,6 +37,10 @@ npx @cretezy/cloudflare-d1-backup backup.sql
3637

3738
The CLI also supports reading from `.env`.
3839

40+
You may also pass the `--limit` option to add a LIMIT clause to each SELECT query.
41+
The default is 1000. You may need to lower it if D1 crashes due to
42+
`Isolate Has exceeded Memory Size`. You can increase it to speed up exports.
43+
3944
### Library
4045

4146
```bash
@@ -49,6 +54,8 @@ const backup = await createBackup({
4954
accountId: "...",
5055
databaseId: "...",
5156
apiKey: "...",
57+
// Optional, see note above on --limit
58+
limit: 1000,
5259
});
5360
```
5461

@@ -60,4 +67,4 @@ const backup = await createBackup({
6067
npx wrangler d1 execute <database> --file=<backup.sql>
6168
```
6269

63-
`<database>` can be the ID or name of the D1 database.
70+
`<database>` must be the ID or name of the D1 database.

package.json

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
{
22
"name": "@cretezy/cloudflare-d1-backup",
33
"main": "dist/index.js",
4-
"version": "0.1.1",
4+
"version": "0.2.0",
55
"license": "Apache-2.0",
66
"type": "module",
77
"scripts": {
88
"build": "tsc",
99
"start": "ts-node-esm src/cli.ts",
10-
"prepack": "pnpm build"
10+
"prepack": "pnpm build",
11+
"format": "prettier -w ."
1112
},
1213
"bin": {
1314
"cloudflare-d1-backup": "./dist/cli.js"
@@ -19,6 +20,7 @@
1920
],
2021
"devDependencies": {
2122
"@types/node": "^20.8.4",
23+
"prettier": "^3.0.3",
2224
"ts-node": "^10.9.1",
2325
"typescript": "^5.2.2"
2426
},

pnpm-lock.yaml

Lines changed: 9 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/cli.ts

Lines changed: 25 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,17 +3,40 @@
33
import fs from "fs";
44
import "dotenv/config";
55
import { createBackup } from "./index.js";
6+
import { parseArgs } from "node:util";
67

7-
const path = process.argv[2];
8+
const { values, positionals } = parseArgs({
9+
options: {
10+
limit: {
11+
type: "string",
12+
short: "l",
13+
default: "1000",
14+
},
15+
},
16+
allowPositionals: true,
17+
});
18+
19+
const path = positionals[0];
820
if (!path) {
921
console.error("Must supply path as first argument.");
1022
process.exit(1);
1123
}
1224

25+
const limit = parseInt(values.limit ?? "");
26+
if (Number.isNaN(limit)) {
27+
console.error("Limit must be a number.");
28+
process.exit(1);
29+
}
30+
if (limit <= 0) {
31+
console.error("Limit must be higher than 0.");
32+
process.exit(1);
33+
}
34+
1335
const backup = await createBackup({
1436
accountId: process.env.CLOUDFLARE_D1_ACCOUNT_ID!,
1537
databaseId: process.env.CLOUDFLARE_D1_DATABASE_ID!,
1638
apiKey: process.env.CLOUDFLARE_D1_API_KEY!,
39+
limit,
1740
});
1841

19-
fs.writeFileSync(process.argv[2], backup);
42+
fs.writeFileSync(path, backup);

src/index.ts

Lines changed: 79 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,22 @@
11
import fetch from "node-fetch";
22

3-
export async function createBackup(
4-
options: { accountId: string; databaseId: string; apiKey: string },
5-
) {
3+
export async function createBackup(options: {
4+
accountId: string;
5+
databaseId: string;
6+
apiKey: string;
7+
// Default to 1000
8+
limit?: number;
9+
}) {
10+
const limit = options.limit ?? 1000;
611
const lines: string[] = [];
712
function append(command: string) {
813
lines.push(command);
914
}
1015

11-
async function fetchD1(sql: string, params?: unknown[]) {
16+
async function fetchD1<T = Record<string, unknown>>(
17+
sql: string,
18+
params?: unknown[],
19+
) {
1220
const response = await fetch(
1321
`https://api.cloudflare.com/client/v4/accounts/${options.accountId}/d1/database/${options.databaseId}/query`,
1422
{
@@ -29,22 +37,21 @@ export async function createBackup(
2937
throw new Error(
3038
`D1 Error: ${body.errors
3139
.map((error: { message: string }) => error.message)
32-
.join(", ")
33-
}`,
40+
.join(", ")}`,
3441
);
3542
}
3643

3744
return body.result as {
3845
meta: {};
39-
results: Record<string, unknown>[];
46+
results: T[];
4047
success: boolean;
4148
}[];
4249
}
4350

4451
let writableSchema: boolean = false;
4552

4653
{
47-
const [tables] = await fetchD1(
54+
const [tables] = await fetchD1<{ name: string; type: string; sql: string }>(
4855
"SELECT name, type, sql FROM sqlite_master WHERE sql IS NOT NULL AND type = 'table' ORDER BY name",
4956
);
5057

@@ -53,7 +60,6 @@ export async function createBackup(
5360
console.warn(`Table name is not string: ${table.name}`);
5461
continue;
5562
}
56-
5763
if (table.name.startsWith("_cf_")) {
5864
continue; // we're not allowed access to these
5965
} else if (table.name === "sqlite_sequence") {
@@ -75,8 +81,10 @@ export async function createBackup(
7581
const tableName = table.name.replace("'", "''");
7682

7783
append(
78-
`INSERT INTO sqlite_master (type, name, tbl_name, rootpage, sql) VALUES ('table', '${tableName}', '${tableName}', 0, '${table.sql.replace(/'/g, "''")
79-
}');`,
84+
`INSERT INTO sqlite_master (type, name, tbl_name, rootpage, sql) VALUES ('table', '${tableName}', '${tableName}', 0, '${table.sql.replace(
85+
/'/g,
86+
"''",
87+
)}');`,
8088
);
8189

8290
continue;
@@ -85,8 +93,9 @@ export async function createBackup(
8593
table.sql.toUpperCase().startsWith("CREATE TABLE ")
8694
) {
8795
append(
88-
`CREATE TABLE IF NOT EXISTS ${table.sql.substring("CREATE TABLE ".length)
89-
};`,
96+
`CREATE TABLE IF NOT EXISTS ${table.sql.substring(
97+
"CREATE TABLE ".length,
98+
)};`,
9099
);
91100
} else {
92101
append(`${table.sql};`);
@@ -106,47 +115,72 @@ export async function createBackup(
106115
if (tableRow.results[0]) {
107116
const columnNames = Object.keys(tableRow.results[0]);
108117

109-
const queries = [];
110-
111-
// D1 said maximum depth is 20, but the limit is seemingly at 9.
112-
for (let index = 0; index < columnNames.length; index += 9) {
113-
const currentColumnNames = columnNames.slice(
114-
index,
115-
Math.min(index + 9, columnNames.length),
116-
);
118+
const [tableRowCount] = await fetchD1<{ count: number }>(
119+
`SELECT COUNT(*) AS count FROM "${tableNameIndent}"`,
120+
);
117121

118-
queries.push(
119-
`SELECT '${currentColumnNames.map((columnName) =>
120-
`'||quote("${columnName.replace('"', '""')}")||'`
121-
).join(", ")
122-
}' AS partialCommand FROM "${tableNameIndent}"`,
123-
);
122+
if (tableRowCount === null) {
123+
throw new Error("Failed to get table row count from table.");
124124
}
125125

126-
const results = await fetchD1(queries.join(";"));
126+
for (
127+
let offset = 0;
128+
offset <= tableRowCount.results[0].count;
129+
offset += limit
130+
) {
131+
const queries = [];
132+
133+
// D1 said maximum depth is 20, but the limit is seemingly at 9.
134+
for (let index = 0; index < columnNames.length; index += 9) {
135+
const currentColumnNames = columnNames.slice(
136+
index,
137+
Math.min(index + 9, columnNames.length),
138+
);
127139

128-
if (results.length && results[0].results.length) {
129-
for (let result = 1; result < results.length; result++) {
130-
if (
131-
results[result].results.length !== results[0].results.length
132-
) {
133-
throw new Error(
134-
"Failed to split expression tree into several queries properly.",
135-
);
136-
}
140+
queries.push(
141+
`SELECT '${currentColumnNames
142+
.map(
143+
(columnName) =>
144+
`'||quote("${columnName.replace('"', '""')}")||'`,
145+
)
146+
.join(
147+
", ",
148+
)}' AS partialCommand FROM "${tableNameIndent}" LIMIT ${limit} OFFSET ${offset}`,
149+
);
137150
}
138151

139-
for (let row = 0; row < results[0].results.length; row++) {
140-
let columns = [];
152+
const results = await fetchD1<{ partialCommand: string }>(
153+
queries.join(";\n"),
154+
);
141155

142-
for (let result = 0; result < results.length; result++) {
143-
columns.push(results[result].results[row].partialCommand);
156+
if (results.length && results[0].results.length) {
157+
for (let result = 1; result < results.length; result++) {
158+
if (
159+
results[result].results.length !== results[0].results.length
160+
) {
161+
throw new Error(
162+
"Failed to split expression tree into several queries properly.",
163+
);
164+
}
144165
}
145166

146-
append(
147-
`INSERT INTO "${tableNameIndent}" (${columnNames.map((columnName) => `"${columnName}"`).join(", ")
148-
}) VALUES (${columns.join(", ")});`,
149-
);
167+
for (let row = 0; row < results[0].results.length; row++) {
168+
let columns: string[] = [];
169+
170+
for (let result = 0; result < results.length; result++) {
171+
columns.push(
172+
results[result].results[row].partialCommand as string,
173+
);
174+
}
175+
176+
append(
177+
`INSERT INTO "${tableNameIndent}" (${columnNames
178+
.map((columnName) => `"${columnName}"`)
179+
.join(", ")}) VALUES (${columns
180+
.map((column) => column.replace("\n", "\\n"))
181+
.join(", ")});`,
182+
);
183+
}
150184
}
151185
}
152186
}

tsconfig.json

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,7 @@
11
{
22
"compilerOptions": {
33
"target": "ES2022",
4-
"lib": [
5-
"ES2022",
6-
"DOM"
7-
],
4+
"lib": ["ES2022", "DOM"],
85
"module": "NodeNext",
96
"strict": true,
107
"outDir": "dist",

0 commit comments

Comments
 (0)