
Commit c1c68fb

feat(plugin-import-export): adds limit and page fields to export options (#13380)
### What:

This PR adds `limit` and `page` fields to the export options, allowing users to control the number of documents exported and the page from which the export starts. It also enforces that `limit` must be a positive multiple of 100.

### Why:

This provides pagination support for large exports, letting users export manageable chunks of data rather than the entire dataset at once. Enforcing multiples of 100 for `limit` ensures consistent chunking behavior and prevents unexpected export issues.

### How:

- The `limit` field determines the maximum number of documents to export and **must be a positive multiple of 100** (a hedged sketch of this validation follows the file summary below).
- The `page` field defines the starting page of the export and is displayed only when a `limit` is specified.
- If `limit` is cleared, `page` resets to 1 to maintain consistency.
- Export logic was adjusted to respect the `limit` and `page` values when fetching documents.

---------

Co-authored-by: Patrik Kozak <[email protected]>
1 parent 3e65111 commit c1c68fb


56 files changed · +411 −46 lines changed
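The multiples-of-100 rule described above is enforced server-side through a new `validateLimitValue` utility, which `createExport.ts` imports below but whose implementation is not part of the visible diff. A minimal sketch of what that check could look like, assuming it returns an error string (or `null`) and ignoring the translation function (`req.t`) the real call passes in:

```ts
// Hypothetical sketch only — the actual validateLimitValue utility added by this commit
// is not shown in this diff and also receives req.t for localized error messages.
export const validateLimitValue = (
  limit: number | undefined,
  step = 100, // matches the fixed batchSize used in createExport
): null | string => {
  // No limit means "export everything" — nothing to validate
  if (limit === undefined || limit === null) {
    return null
  }

  // Must be a positive integer and a multiple of the batch size
  if (!Number.isInteger(limit) || limit <= 0 || limit % step !== 0) {
    return `Limit must be a positive multiple of ${step}.`
  }

  return null
}
```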

packages/plugin-import-export/src/components/ExportSaveButton/index.tsx

Lines changed: 13 additions & 4 deletions
@@ -73,7 +73,17 @@ export const ExportSaveButton: React.FC = () => {
       }

       if (!response.ok) {
-        throw new Error('Failed to download file')
+        // Try to parse the error message from the JSON response
+        let errorMsg = 'Failed to download file'
+        try {
+          const errorJson = await response.json()
+          if (errorJson?.errors?.[0]?.message) {
+            errorMsg = errorJson.errors[0].message
+          }
+        } catch {
+          // Ignore JSON parse errors, fallback to generic message
+        }
+        throw new Error(errorMsg)
       }

       const fileStream = response.body
@@ -98,9 +108,8 @@ export const ExportSaveButton: React.FC = () => {
       a.click()
       document.body.removeChild(a)
       URL.revokeObjectURL(url)
-    } catch (error) {
-      console.error('Error downloading file:', error)
-      toast.error('Error downloading file')
+    } catch (error: any) {
+      toast.error(error.message || 'Error downloading file')
     }
   }
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+.page-field {
+  --field-width: 33.3333%;
+}
Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+'use client'
+
+import type { NumberFieldClientComponent } from 'payload'
+
+import { NumberField, useField } from '@payloadcms/ui'
+import React, { useEffect } from 'react'
+
+import './index.scss'
+
+const baseClass = 'page-field'
+
+export const Page: NumberFieldClientComponent = (props) => {
+  const { setValue } = useField<number>()
+  const { value: limitValue } = useField<number>({ path: 'limit' })
+
+  // Effect to reset page to 1 if limit is removed
+  useEffect(() => {
+    if (!limitValue) {
+      setValue(1) // Reset page to 1
+    }
+  }, [limitValue, setValue])
+
+  return (
+    <div className={baseClass}>
+      <NumberField
+        field={{
+          name: props.field.name,
+          admin: {
+            autoComplete: undefined,
+            placeholder: undefined,
+            step: 1,
+          },
+          label: props.field.label,
+          min: 1,
+        }}
+        onChange={(value) => setValue(value ?? 1)} // Update the page value on change
+        path={props.path}
+      />
+    </div>
+  )
+}

packages/plugin-import-export/src/components/Preview/index.tsx

Lines changed: 3 additions & 0 deletions
@@ -28,6 +28,7 @@ export const Preview = () => {
   const { collection } = useImportExport()
   const { config } = useConfig()
   const { value: where } = useField({ path: 'where' })
+  const { value: page } = useField({ path: 'page' })
   const { value: limit } = useField<number>({ path: 'limit' })
   const { value: fields } = useField<string[]>({ path: 'fields' })
   const { value: sort } = useField({ path: 'sort' })
@@ -71,6 +72,7 @@ export const Preview = () => {
           format,
           limit,
           locale,
+          page,
           sort,
           where,
         }),
@@ -168,6 +170,7 @@ export const Preview = () => {
     i18n,
     limit,
     locale,
+    page,
     sort,
     where,
   ])
Lines changed: 1 addition & 2 deletions
@@ -1,4 +1,3 @@
 .sort-by-fields {
-  display: block;
-  width: 33%;
+  --field-width: 25%;
 }

packages/plugin-import-export/src/components/SortBy/index.tsx

Lines changed: 2 additions & 1 deletion
@@ -15,6 +15,7 @@ import React, { useEffect, useState } from 'react'

 import { reduceFields } from '../FieldsToExport/reduceFields.js'
 import { useImportExport } from '../ImportExportProvider/index.js'
+import './index.scss'

 const baseClass = 'sort-by-fields'

@@ -71,7 +72,7 @@ export const SortBy: SelectFieldClientComponent = (props) => {
   }

   return (
-    <div className={baseClass} style={{ '--field-width': '33%' } as React.CSSProperties}>
+    <div className={baseClass}>
       <FieldLabel label={props.field.label} path={props.path} />
       <ReactSelect
         className={baseClass}

packages/plugin-import-export/src/export/createExport.ts

Lines changed: 89 additions & 15 deletions
@@ -6,6 +6,7 @@ import { APIError } from 'payload'
 import { Readable } from 'stream'

 import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js'
+import { validateLimitValue } from '../utilities/validateLimitValue.js'
 import { flattenObject } from './flattenObject.js'
 import { getCustomFieldFunctions } from './getCustomFieldFunctions.js'
 import { getFilename } from './getFilename.js'
@@ -23,8 +24,10 @@ export type Export = {
   format: 'csv' | 'json'
   globals?: string[]
   id: number | string
+  limit?: number
   locale?: string
   name: string
+  page?: number
   slug: string
   sort: Sort
   user: string
@@ -57,6 +60,8 @@ export const createExport = async (args: CreateExportArgs) => {
       locale: localeInput,
       sort,
       user,
+      page,
+      limit: incomingLimit,
       where,
     },
     req: { locale: localeArg, payload },
@@ -87,14 +92,30 @@ export const createExport = async (args: CreateExportArgs) => {
     req.payload.logger.debug({ message: 'Export configuration:', name, isCSV, locale })
   }

+  const batchSize = 100 // fixed per request
+
+  const hardLimit =
+    typeof incomingLimit === 'number' && incomingLimit > 0 ? incomingLimit : undefined
+
+  const { totalDocs } = await payload.count({
+    collection: collectionSlug,
+    user,
+    locale,
+    overrideAccess: false,
+  })
+
+  const totalPages = Math.max(1, Math.ceil(totalDocs / batchSize))
+  const requestedPage = page || 1
+  const adjustedPage = requestedPage > totalPages ? 1 : requestedPage
+
   const findArgs = {
     collection: collectionSlug,
     depth: 1,
     draft: drafts === 'yes',
-    limit: 100,
+    limit: batchSize,
     locale,
     overrideAccess: false,
-    page: 0,
+    page: 0, // The page will be incremented manually in the loop
     select,
     sort,
     user,
@@ -156,15 +177,37 @@ export const createExport = async (args: CreateExportArgs) => {
     req.payload.logger.debug('Pre-scanning all columns before streaming')
   }

+  const limitErrorMsg = validateLimitValue(
+    incomingLimit,
+    req.t,
+    batchSize, // step i.e. 100
+  )
+  if (limitErrorMsg) {
+    throw new APIError(limitErrorMsg)
+  }
+
   const allColumns: string[] = []

   if (isCSV) {
     const allColumnsSet = new Set<string>()
-    let scanPage = 1
+
+    // Use the incoming page value here, defaulting to 1 if undefined
+    let scanPage = adjustedPage
     let hasMore = true
+    let fetched = 0
+    const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

     while (hasMore) {
-      const result = await payload.find({ ...findArgs, page: scanPage })
+      const remaining = Math.max(0, maxDocs - fetched)
+      if (remaining === 0) {
+        break
+      }
+
+      const result = await payload.find({
+        ...findArgs,
+        page: scanPage,
+        limit: Math.min(batchSize, remaining),
+      })

       result.docs.forEach((doc) => {
         const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions }))
@@ -176,8 +219,9 @@ export const createExport = async (args: CreateExportArgs) => {
         })
       })

-      hasMore = result.hasNextPage
-      scanPage += 1
+      fetched += result.docs.length
+      scanPage += 1 // Increment page for next batch
+      hasMore = result.hasNextPage && fetched < maxDocs
     }

     if (debug) {
@@ -187,11 +231,27 @@ export const createExport = async (args: CreateExportArgs) => {

   const encoder = new TextEncoder()
   let isFirstBatch = true
-  let streamPage = 1
+  let streamPage = adjustedPage
+  let fetched = 0
+  const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

   const stream = new Readable({
     async read() {
-      const result = await payload.find({ ...findArgs, page: streamPage })
+      const remaining = Math.max(0, maxDocs - fetched)
+
+      if (remaining === 0) {
+        if (!isCSV) {
+          this.push(encoder.encode(']'))
+        }
+        this.push(null)
+        return
+      }
+
+      const result = await payload.find({
+        ...findArgs,
+        page: streamPage,
+        limit: Math.min(batchSize, remaining),
+      })

       if (debug) {
         req.payload.logger.debug(`Streaming batch ${streamPage} with ${result.docs.length} docs`)
@@ -240,10 +300,11 @@ export const createExport = async (args: CreateExportArgs) => {
         }
       }

+      fetched += result.docs.length
      isFirstBatch = false
-      streamPage += 1
+      streamPage += 1 // Increment stream page for the next batch

-      if (!result.hasNextPage) {
+      if (!result.hasNextPage || fetched >= maxDocs) {
        if (debug) {
          req.payload.logger.debug('Stream complete - no more pages')
        }
@@ -272,18 +333,29 @@ export const createExport = async (args: CreateExportArgs) => {
   const rows: Record<string, unknown>[] = []
   const columnsSet = new Set<string>()
   const columns: string[] = []
-  let page = 1
+
+  // Start from the incoming page value, defaulting to 1 if undefined
+  let currentPage = adjustedPage
+  let fetched = 0
   let hasNextPage = true
+  const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

   while (hasNextPage) {
+    const remaining = Math.max(0, maxDocs - fetched)
+
+    if (remaining === 0) {
+      break
+    }
+
     const result = await payload.find({
       ...findArgs,
-      page,
+      page: currentPage,
+      limit: Math.min(batchSize, remaining),
     })

     if (debug) {
       req.payload.logger.debug(
-        `Processing batch ${findArgs.page} with ${result.docs.length} documents`,
+        `Processing batch ${currentPage} with ${result.docs.length} documents`,
       )
     }

@@ -308,10 +380,12 @@ export const createExport = async (args: CreateExportArgs) => {
       outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n'))
     }

-    hasNextPage = result.hasNextPage
-    page += 1
+    fetched += result.docs.length
+    hasNextPage = result.hasNextPage && fetched < maxDocs
+    currentPage += 1 // Increment page for next batch
   }

+  // Prepare final output
   if (isCSV) {
     const paddedRows = rows.map((row) => {
       const fullRow: Record<string, unknown> = {}
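The pagination logic above always queries Payload in fixed 100-document batches: a user-supplied `limit` only caps how many documents are consumed in total, and `page` only shifts where scanning starts (falling back to page 1 when the requested page is past the end of the collection). A rough, self-contained illustration of the resulting fetch plan (hypothetical helper, not part of the commit):

```ts
// Illustration only — mirrors the batching in createExport but is not part of this commit.
const planBatches = (opts: { limit?: number; page?: number; totalDocs: number }) => {
  const batchSize = 100
  const totalPages = Math.max(1, Math.ceil(opts.totalDocs / batchSize))
  const requestedPage = opts.page || 1
  // Fall back to page 1 when the requested page is beyond the collection
  let page = requestedPage > totalPages ? 1 : requestedPage
  const maxDocs = opts.limit && opts.limit > 0 ? opts.limit : Number.POSITIVE_INFINITY

  const batches: { limit: number; page: number }[] = []
  let fetched = 0
  while (fetched < maxDocs && page <= totalPages) {
    const remaining = maxDocs - fetched
    batches.push({ limit: Math.min(batchSize, remaining), page })
    fetched += Math.min(batchSize, remaining) // the real loop uses result.docs.length
    page += 1
  }
  return batches
}

// planBatches({ limit: 300, page: 2, totalDocs: 1000 })
// => [ { limit: 100, page: 2 }, { limit: 100, page: 3 }, { limit: 100, page: 4 } ]
```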

packages/plugin-import-export/src/export/download.ts

Lines changed: 25 additions & 14 deletions
@@ -5,22 +5,33 @@ import { APIError } from 'payload'
 import { createExport } from './createExport.js'

 export const download = async (req: PayloadRequest, debug = false) => {
-  let body
-  if (typeof req?.json === 'function') {
-    body = await req.json()
-  }
+  try {
+    let body
+    if (typeof req?.json === 'function') {
+      body = await req.json()
+    }

-  if (!body || !body.data) {
-    throw new APIError('Request data is required.')
-  }
+    if (!body || !body.data) {
+      throw new APIError('Request data is required.')
+    }

-  req.payload.logger.info(`Download request received ${body.data.collectionSlug}`)
+    const { collectionSlug } = body.data || {}

-  body.data.user = req.user
+    req.payload.logger.info(`Download request received ${collectionSlug}`)
+    body.data.user = req.user

-  return createExport({
-    download: true,
-    input: { ...body.data, debug },
-    req,
-  }) as Promise<Response>
+    const res = await createExport({
+      download: true,
+      input: { ...body.data, debug },
+      req,
+    })
+
+    return res as Response
+  } catch (err) {
+    // Return JSON for front-end toast
+    return new Response(
+      JSON.stringify({ errors: [{ message: (err as Error).message || 'Something went wrong' }] }),
+      { headers: { 'Content-Type': 'application/json' }, status: 400 },
+    )
+  }
 }
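Together with the `ExportSaveButton` change above, this `catch` block establishes an informal error contract: failures come back as a 400 JSON response carrying an `errors` array, and the client surfaces `errors[0].message` in a toast. A sketch of that shape (the type name is illustrative; the commit does not define one):

```ts
// Illustrative only — the commit relies on this shape implicitly rather than naming it.
type ExportErrorResponse = {
  errors: { message: string }[]
}

// Server (download.ts): new Response(JSON.stringify({ errors: [{ message }] }), { status: 400 })
// Client (ExportSaveButton): reads errorJson?.errors?.[0]?.message and shows it via toast.error()
```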
