Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,17 @@ export const ExportSaveButton: React.FC = () => {
}

if (!response.ok) {
throw new Error('Failed to download file')
// Try to parse the error message from the JSON response
let errorMsg = 'Failed to download file'
try {
const errorJson = await response.json()
if (errorJson?.errors?.[0]?.message) {
errorMsg = errorJson.errors[0].message
}
} catch {
// Ignore JSON parse errors, fallback to generic message
}
throw new Error(errorMsg)
}

const fileStream = response.body
Expand All @@ -98,9 +108,8 @@ export const ExportSaveButton: React.FC = () => {
a.click()
document.body.removeChild(a)
URL.revokeObjectURL(url)
} catch (error) {
console.error('Error downloading file:', error)
toast.error('Error downloading file')
} catch (error: any) {
toast.error(error.message || 'Error downloading file')
}
}

Expand Down
3 changes: 3 additions & 0 deletions packages/plugin-import-export/src/components/Page/index.scss
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Sizes the export "page" number input.
// NOTE(review): --field-width is presumably consumed by the surrounding
// Payload admin field-wrapper styles — confirm against the admin UI SCSS.
.page-field {
--field-width: 33.3333%;
}
41 changes: 41 additions & 0 deletions packages/plugin-import-export/src/components/Page/index.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
'use client'

import type { NumberFieldClientComponent } from 'payload'

import { NumberField, useField } from '@payloadcms/ui'
import React, { useEffect } from 'react'

import './index.scss'

const baseClass = 'page-field'

/**
 * Number input for the export "page" option.
 *
 * Watches the sibling `limit` field and forces the page back to 1 whenever
 * the limit becomes falsy, since a page offset only makes sense while a
 * limit is set.
 */
export const Page: NumberFieldClientComponent = (props) => {
  const { field, path } = props

  // Field value for this component's own path (taken from context).
  const { setValue } = useField<number>()
  // Observe the sibling `limit` field so we can react when it is cleared.
  const { value: limit } = useField<number>({ path: 'limit' })

  // Limit removed (or otherwise falsy) — paging no longer applies, snap back to 1.
  useEffect(() => {
    if (!limit) {
      setValue(1)
    }
  }, [limit, setValue])

  return (
    <div className={baseClass}>
      <NumberField
        field={{
          name: field.name,
          admin: {
            autoComplete: undefined,
            placeholder: undefined,
            step: 1,
          },
          label: field.label,
          min: 1,
        }}
        // Keep the stored value in sync; an emptied input falls back to page 1.
        onChange={(value) => setValue(value ?? 1)}
        path={path}
      />
    </div>
  )
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ export const Preview = () => {
const { collection } = useImportExport()
const { config } = useConfig()
const { value: where } = useField({ path: 'where' })
const { value: page } = useField({ path: 'page' })
const { value: limit } = useField<number>({ path: 'limit' })
const { value: fields } = useField<string[]>({ path: 'fields' })
const { value: sort } = useField({ path: 'sort' })
Expand Down Expand Up @@ -71,6 +72,7 @@ export const Preview = () => {
format,
limit,
locale,
page,
sort,
where,
}),
Expand Down Expand Up @@ -168,6 +170,7 @@ export const Preview = () => {
i18n,
limit,
locale,
page,
sort,
where,
])
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
.sort-by-fields {
display: block;
width: 33%;
--field-width: 25%;
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import React, { useEffect, useState } from 'react'

import { reduceFields } from '../FieldsToExport/reduceFields.js'
import { useImportExport } from '../ImportExportProvider/index.js'
import './index.scss'

const baseClass = 'sort-by-fields'

Expand Down Expand Up @@ -71,7 +72,7 @@ export const SortBy: SelectFieldClientComponent = (props) => {
}

return (
<div className={baseClass} style={{ '--field-width': '33%' } as React.CSSProperties}>
<div className={baseClass}>
<FieldLabel label={props.field.label} path={props.path} />
<ReactSelect
className={baseClass}
Expand Down
104 changes: 89 additions & 15 deletions packages/plugin-import-export/src/export/createExport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { APIError } from 'payload'
import { Readable } from 'stream'

import { buildDisabledFieldRegex } from '../utilities/buildDisabledFieldRegex.js'
import { validateLimitValue } from '../utilities/validateLimitValue.js'
import { flattenObject } from './flattenObject.js'
import { getCustomFieldFunctions } from './getCustomFieldFunctions.js'
import { getFilename } from './getFilename.js'
Expand All @@ -23,8 +24,10 @@ export type Export = {
format: 'csv' | 'json'
globals?: string[]
id: number | string
limit?: number
locale?: string
name: string
page?: number
slug: string
sort: Sort
user: string
Expand Down Expand Up @@ -57,6 +60,8 @@ export const createExport = async (args: CreateExportArgs) => {
locale: localeInput,
sort,
user,
page,
limit: incomingLimit,
where,
},
req: { locale: localeArg, payload },
Expand Down Expand Up @@ -87,14 +92,30 @@ export const createExport = async (args: CreateExportArgs) => {
req.payload.logger.debug({ message: 'Export configuration:', name, isCSV, locale })
}

const batchSize = 100 // fixed per request

const hardLimit =
typeof incomingLimit === 'number' && incomingLimit > 0 ? incomingLimit : undefined

const { totalDocs } = await payload.count({
collection: collectionSlug,
user,
locale,
overrideAccess: false,
})

const totalPages = Math.max(1, Math.ceil(totalDocs / batchSize))
const requestedPage = page || 1
const adjustedPage = requestedPage > totalPages ? 1 : requestedPage

const findArgs = {
collection: collectionSlug,
depth: 1,
draft: drafts === 'yes',
limit: 100,
limit: batchSize,
locale,
overrideAccess: false,
page: 0,
page: 0, // The page will be incremented manually in the loop
select,
sort,
user,
Expand Down Expand Up @@ -156,15 +177,37 @@ export const createExport = async (args: CreateExportArgs) => {
req.payload.logger.debug('Pre-scanning all columns before streaming')
}

const limitErrorMsg = validateLimitValue(
incomingLimit,
req.t,
batchSize, // step i.e. 100
)
if (limitErrorMsg) {
throw new APIError(limitErrorMsg)
}

const allColumns: string[] = []

if (isCSV) {
const allColumnsSet = new Set<string>()
let scanPage = 1

// Use the incoming page value here, defaulting to 1 if undefined
let scanPage = adjustedPage
let hasMore = true
let fetched = 0
const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

while (hasMore) {
const result = await payload.find({ ...findArgs, page: scanPage })
const remaining = Math.max(0, maxDocs - fetched)
if (remaining === 0) {
break
}

const result = await payload.find({
...findArgs,
page: scanPage,
limit: Math.min(batchSize, remaining),
})

result.docs.forEach((doc) => {
const flat = filterDisabledCSV(flattenObject({ doc, fields, toCSVFunctions }))
Expand All @@ -176,8 +219,9 @@ export const createExport = async (args: CreateExportArgs) => {
})
})

hasMore = result.hasNextPage
scanPage += 1
fetched += result.docs.length
scanPage += 1 // Increment page for next batch
hasMore = result.hasNextPage && fetched < maxDocs
}

if (debug) {
Expand All @@ -187,11 +231,27 @@ export const createExport = async (args: CreateExportArgs) => {

const encoder = new TextEncoder()
let isFirstBatch = true
let streamPage = 1
let streamPage = adjustedPage
let fetched = 0
const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

const stream = new Readable({
async read() {
const result = await payload.find({ ...findArgs, page: streamPage })
const remaining = Math.max(0, maxDocs - fetched)

if (remaining === 0) {
if (!isCSV) {
this.push(encoder.encode(']'))
}
this.push(null)
return
}

const result = await payload.find({
...findArgs,
page: streamPage,
limit: Math.min(batchSize, remaining),
})

if (debug) {
req.payload.logger.debug(`Streaming batch ${streamPage} with ${result.docs.length} docs`)
Expand Down Expand Up @@ -240,10 +300,11 @@ export const createExport = async (args: CreateExportArgs) => {
}
}

fetched += result.docs.length
isFirstBatch = false
streamPage += 1
streamPage += 1 // Increment stream page for the next batch

if (!result.hasNextPage) {
if (!result.hasNextPage || fetched >= maxDocs) {
if (debug) {
req.payload.logger.debug('Stream complete - no more pages')
}
Expand Down Expand Up @@ -272,18 +333,29 @@ export const createExport = async (args: CreateExportArgs) => {
const rows: Record<string, unknown>[] = []
const columnsSet = new Set<string>()
const columns: string[] = []
let page = 1

// Start from the incoming page value, defaulting to 1 if undefined
let currentPage = adjustedPage
let fetched = 0
let hasNextPage = true
const maxDocs = typeof hardLimit === 'number' ? hardLimit : Number.POSITIVE_INFINITY

while (hasNextPage) {
const remaining = Math.max(0, maxDocs - fetched)

if (remaining === 0) {
break
}

const result = await payload.find({
...findArgs,
page,
page: currentPage,
limit: Math.min(batchSize, remaining),
})

if (debug) {
req.payload.logger.debug(
`Processing batch ${findArgs.page} with ${result.docs.length} documents`,
`Processing batch ${currentPage} with ${result.docs.length} documents`,
)
}

Expand All @@ -308,10 +380,12 @@ export const createExport = async (args: CreateExportArgs) => {
outputData.push(batchRows.map((doc) => JSON.stringify(doc)).join(',\n'))
}

hasNextPage = result.hasNextPage
page += 1
fetched += result.docs.length
hasNextPage = result.hasNextPage && fetched < maxDocs
currentPage += 1 // Increment page for next batch
}

// Prepare final output
if (isCSV) {
const paddedRows = rows.map((row) => {
const fullRow: Record<string, unknown> = {}
Expand Down
39 changes: 25 additions & 14 deletions packages/plugin-import-export/src/export/download.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,22 +5,33 @@ import { APIError } from 'payload'
import { createExport } from './createExport.js'

/**
 * HTTP handler for the export "download" endpoint.
 *
 * Parses the JSON request body, attaches the requesting user, and delegates
 * to `createExport` in download mode. Any failure is converted into a JSON
 * `{ errors: [{ message }] }` response with status 400 so the admin UI can
 * surface the message in a toast.
 *
 * @param req   Payload request; only `json()`, `payload.logger`, and `user` are used.
 * @param debug Forwarded to `createExport` to enable verbose logging.
 */
export const download = async (req: PayloadRequest, debug = false) => {
  try {
    let body
    if (typeof req?.json === 'function') {
      body = await req.json()
    }

    if (!body || !body.data) {
      throw new APIError('Request data is required.')
    }

    // `body.data` is guaranteed truthy here, so destructure it directly.
    const { collectionSlug } = body.data

    req.payload.logger.info(`Download request received ${collectionSlug}`)
    body.data.user = req.user

    const res = await createExport({
      download: true,
      input: { ...body.data, debug },
      req,
    })

    return res as Response
  } catch (err) {
    // `err` is `unknown`; narrow instead of asserting so non-Error throws
    // (strings, plain objects) still produce a sensible fallback message.
    const message = err instanceof Error && err.message ? err.message : 'Something went wrong'

    // Return JSON so the front-end can display the error in a toast.
    return new Response(JSON.stringify({ errors: [{ message }] }), {
      headers: { 'Content-Type': 'application/json' },
      status: 400,
    })
  }
}
Loading