@@ -68,10 +68,30 @@ export default async function handler(
     return res.status(200).json({ sessions: [] });
   }
 
-  // Get metadata for each object to fetch the title
-  const objectsWithMetadata = await Promise.all(
-    data.Contents.filter((obj) => obj.Key && obj.Key.endsWith(".json")).map(
-      async (obj) => {
+  // Helper function to chunk an array for batch processing
+  const chunkArray = <T>(array: T[], chunkSize: number): T[][] => {
+    const chunks: T[][] = [];
+    for (let i = 0; i < array.length; i += chunkSize) {
+      chunks.push(array.slice(i, i + chunkSize));
+    }
+    return chunks;
+  };
+
+  // Filter JSON files and prepare for batched metadata fetching
+  const jsonFiles = data.Contents.filter(
+    (obj) => obj.Key && obj.Key.endsWith(".json")
+  );
+
+  // Process files in batches of 10 to avoid overwhelming S3 with concurrent requests
+  const BATCH_SIZE = 10;
+  const fileChunks = chunkArray(jsonFiles, BATCH_SIZE);
+
+  const allObjectsWithMetadata: (HistorySession | null)[] = [];
+
+  // Process each batch sequentially to control concurrency
+  for (const chunk of fileChunks) {
+    const chunkResults = await Promise.all(
+      chunk.map(async (obj) => {
         try {
           const headResponse = await s3.send(
             new HeadObjectCommand({
@@ -128,11 +148,13 @@ export default async function handler(
           }
           return null;
         }
-      }
-    )
-  );
+      })
+    );
+
+    allObjectsWithMetadata.push(...chunkResults);
+  }
 
-  const sessions: HistorySession[] = objectsWithMetadata
+  const sessions: HistorySession[] = allObjectsWithMetadata
     .filter(Boolean)
     .sort((a, b) =>
       b!.timestamp.localeCompare(a!.timestamp)
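
For reference, below is a minimal standalone sketch of the concurrency-limiting pattern this change introduces: split the work into fixed-size chunks, run each chunk with Promise.all, and await each batch before starting the next, so at most BATCH_SIZE requests are in flight at once. The fetchOne helper, its simulated delay, and the sample keys are hypothetical stand-ins for the per-object HeadObjectCommand calls in the handler.

// Chunk an array into fixed-size batches, as in the commit above.
const chunkArray = <T>(array: T[], chunkSize: number): T[][] => {
  const chunks: T[][] = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    chunks.push(array.slice(i, i + chunkSize));
  }
  return chunks;
};

// Hypothetical stand-in for the per-object S3 HeadObjectCommand call.
const fetchOne = async (key: string): Promise<string | null> => {
  await new Promise((resolve) => setTimeout(resolve, 50)); // simulated network latency
  return key.endsWith(".json") ? key : null;
};

async function main() {
  const keys = Array.from({ length: 25 }, (_, i) => `session-${i}.json`);
  const results: (string | null)[] = [];

  // Each batch of 10 runs concurrently; batches run one after another,
  // so at most 10 requests are in flight at any time.
  for (const chunk of chunkArray(keys, 10)) {
    results.push(...(await Promise.all(chunk.map(fetchOne))));
  }

  console.log(results.filter(Boolean).length); // 25
}

main();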