@@ -4,6 +4,7 @@ import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js';
 import { EventType, SchemaVendor } from './constants.js';
 import { BatchProcessingError } from './errors.js';
 import type {
+  BasePartialBatchProcessorParserConfig,
   BaseRecord,
   EventSourceDataClassTypes,
   FailureResponse,
@@ -108,13 +109,9 @@ class BatchProcessor extends BasePartialBatchProcessor {
     record: BaseRecord
   ): Promise<SuccessResponse | FailureResponse> {
     try {
-      const recordToProcess =
-        this.schema == null
-          ? record
-          : await this.#parseRecord(record, this.eventType, this.schema);
+      const recordToProcess = await this.#parseRecord(record, this.eventType);
       const data = this.toBatchType(recordToProcess, this.eventType);
       const result = await this.handler(data, this.options?.context);
-
       return this.successHandler(record, result);
     } catch (error) {
       return this.failureHandler(record, error as Error);
@@ -146,69 +143,53 @@ class BatchProcessor extends BasePartialBatchProcessor {
    */
   async #createExtendedSchema(options: {
     eventType: keyof typeof EventType;
-    schema: StandardSchemaV1;
-    useTransformers: boolean;
+    innerSchema: StandardSchemaV1;
+    transformer?: BasePartialBatchProcessorParserConfig['transformer'];
   }) {
-    const { eventType, schema, useTransformers } = options;
+    const { eventType, innerSchema, transformer } = options;
+    let schema = innerSchema;
+    switch (transformer) {
+      case 'json': {
+        const { JSONStringified } = await import(
+          '@aws-lambda-powertools/parser/helpers'
+        );
+        schema = JSONStringified(innerSchema as any);
+        break;
+      }
+      case 'base64': {
+        const { Base64Encoded } = await import(
+          '@aws-lambda-powertools/parser/helpers'
+        );
+        schema = Base64Encoded(innerSchema as any);
+        break;
+      }
+      case 'unmarshall': {
+        const { DynamoDBMarshalled } = await import(
+          '@aws-lambda-powertools/parser/helpers/dynamodb'
+        );
+        schema = DynamoDBMarshalled(innerSchema as any);
+        break;
+      }
+    }
     switch (eventType) {
       case EventType.SQS: {
-        if (useTransformers) {
-          const [{ JSONStringified }, { SqsRecordSchema }] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers'),
-            import('@aws-lambda-powertools/parser/schemas/sqs'),
-          ]);
-          return SqsRecordSchema.extend({
-            body: JSONStringified(schema as any),
-          });
-        }
         const { SqsRecordSchema } = await import(
           '@aws-lambda-powertools/parser/schemas/sqs'
         );
-        return SqsRecordSchema.extend({ body: schema });
+        return SqsRecordSchema.extend({
+          body: schema,
+        });
       }
-
       case EventType.KinesisDataStreams: {
-        if (useTransformers) {
-          const [
-            { Base64Encoded },
-            { KinesisDataStreamRecord, KinesisDataStreamRecordPayload },
-          ] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers'),
-            import('@aws-lambda-powertools/parser/schemas/kinesis'),
-          ]);
-          return KinesisDataStreamRecord.extend({
-            kinesis: KinesisDataStreamRecordPayload.extend({
-              data: Base64Encoded(schema as any),
-            }),
-          });
-        }
         const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } =
           await import('@aws-lambda-powertools/parser/schemas/kinesis');
         return KinesisDataStreamRecord.extend({
-          kinesis: KinesisDataStreamRecordPayload.extend({ data: schema }),
+          kinesis: KinesisDataStreamRecordPayload.extend({
+            data: schema,
+          }),
         });
       }
-
       case EventType.DynamoDBStreams: {
-        if (useTransformers) {
-          const [
-            { DynamoDBMarshalled },
-            { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase },
-          ] = await Promise.all([
-            import('@aws-lambda-powertools/parser/helpers/dynamodb'),
-            import('@aws-lambda-powertools/parser/schemas/dynamodb'),
-          ]);
-          return DynamoDBStreamRecord.extend({
-            dynamodb: DynamoDBStreamChangeRecordBase.extend({
-              OldImage: DynamoDBMarshalled<StreamRecord['OldImage']>(
-                schema as any
-              ).optional(),
-              NewImage: DynamoDBMarshalled<StreamRecord['NewImage']>(
-                schema as any
-              ).optional(),
-            }),
-          });
-        }
         const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } =
           await import('@aws-lambda-powertools/parser/schemas/dynamodb');
         return DynamoDBStreamRecord.extend({
@@ -218,7 +199,6 @@ class BatchProcessor extends BasePartialBatchProcessor {
           }),
         });
       }
-
       default: {
         console.warn(
           `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}`
@@ -238,57 +218,77 @@ class BatchProcessor extends BasePartialBatchProcessor {
    *
    * @param record - The record to be parsed
    * @param eventType - The type of event to process
-   * @param schema - The StandardSchema to be used for parsing
    */
   async #parseRecord(
     record: EventSourceDataClassTypes,
-    eventType: keyof typeof EventType,
-    schema: StandardSchemaV1
+    eventType: keyof typeof EventType
   ): Promise<EventSourceDataClassTypes> {
-    const { parse } = await import('@aws-lambda-powertools/parser');
-    // Try parsing with the original schema first
-    const extendedSchemaParsing = parse(record, undefined, schema, true);
-    if (extendedSchemaParsing.success) {
-      return extendedSchemaParsing.data as EventSourceDataClassTypes;
+    if (this.parserConfig == null) {
+      return record;
     }
-    // Only proceed with schema extension if it's a Zod schema
-    if (schema['~standard'].vendor !== SchemaVendor.Zod) {
-      console.warn(
-        'The schema provided is not supported. Only Zod schemas are supported for extension.'
+    const { parse } = await import('@aws-lambda-powertools/parser');
+    const { schema, innerSchema, transformer } = this.parserConfig;
+    // If the external schema is specified, use it to parse the record
+    if (schema != null) {
+      const extendedSchemaParsing = parse(record, undefined, schema, true);
+      if (extendedSchemaParsing.success) {
+        return extendedSchemaParsing.data as EventSourceDataClassTypes;
+      }
+      const issues = extendedSchemaParsing.error
+        .cause as ReadonlyArray<StandardSchemaV1.Issue>;
+      throw new Error(
+        `Failed to parse record: ${issues.map((issue) => `${issue?.path?.join('.')}: ${issue.message}`).join('; ')}`
       );
-      throw new Error('Unsupported schema type');
-    }
-    // Handle schema extension based on event type
-    // Try without transformers first, then with transformers
-    const schemaWithoutTransformers = await this.#createExtendedSchema({
-      eventType,
-      schema,
-      useTransformers: false,
-    });
-    const schemaWithoutTransformersParsing = parse(
-      record,
-      undefined,
-      schemaWithoutTransformers,
-      true
-    );
-    if (schemaWithoutTransformersParsing.success) {
-      return schemaWithoutTransformersParsing.data as EventSourceDataClassTypes;
     }
-    const schemaWithTransformers = await this.#createExtendedSchema({
-      eventType,
-      schema,
-      useTransformers: true,
-    });
-    const schemaWithTransformersParsing = parse(
-      record,
-      undefined,
-      schemaWithTransformers,
-      true
-    );
-    if (schemaWithTransformersParsing.success) {
-      return schemaWithTransformersParsing.data as EventSourceDataClassTypes;
+    if (innerSchema != null) {
+      // Only proceed with schema extension if it's a Zod schema
+      if (innerSchema['~standard'].vendor !== SchemaVendor.Zod) {
+        console.warn(
+          'The schema provided is not supported. Only Zod schemas are supported for extension.'
+        );
+        throw new Error('Unsupported schema type');
+      }
+      if (transformer != null) {
+        const schemaWithTransformers = await this.#createExtendedSchema({
+          eventType,
+          innerSchema,
+          transformer,
+        });
+        const schemaWithTransformersParsing = parse(
+          record,
+          undefined,
+          schemaWithTransformers,
+          true
+        );
+        if (schemaWithTransformersParsing.success) {
+          return schemaWithTransformersParsing.data as EventSourceDataClassTypes;
+        }
+        const issues = schemaWithTransformersParsing.error
+          .cause as ReadonlyArray<StandardSchemaV1.Issue>;
+        throw new Error(
+          `Failed to parse record: ${issues.map((issue) => `${issue?.path?.join('.')}: ${issue.message}`).join('; ')}`
+        );
+      }
+      const schemaWithoutTransformers = await this.#createExtendedSchema({
+        eventType,
+        innerSchema,
+      });
+      const schemaWithoutTransformersParsing = parse(
+        record,
+        undefined,
+        schemaWithoutTransformers,
+        true
+      );
+      if (schemaWithoutTransformersParsing.success) {
+        return schemaWithoutTransformersParsing.data as EventSourceDataClassTypes;
+      }
+      const issues = schemaWithoutTransformersParsing.error
+        .cause as ReadonlyArray<StandardSchemaV1.Issue>;
+      throw new Error(
+        `Failed to parse record: ${issues.map((issue) => `${issue?.path?.join('.')}: ${issue.message}`).join('; ')}`
+      );
     }
-    throw new Error('Failed to parse record');
+    throw new Error('Either schema or innerSchema is required for parsing');
   }
 }

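For context on how the refactored parsing path is intended to be consumed, here is a minimal usage sketch. It assumes the parser configuration is supplied through the `BatchProcessor` constructor options and surfaces as the `parserConfig` read by `#parseRecord`; that wiring is outside this diff, so the option name and constructor shape shown below are assumptions, not the confirmed public API.

```ts
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type { SQSHandler } from 'aws-lambda';
import { z } from 'zod';

// Zod schema for the JSON payload carried inside each SQS record body.
const orderSchema = z.object({
  orderId: z.string(),
  amount: z.number(),
});

// Assumption: the parser config reaches BasePartialBatchProcessor via a
// constructor option. The diff only shows the config's shape: `innerSchema`
// plus an optional `transformer` ('json' | 'base64' | 'unmarshall').
const processor = new BatchProcessor(EventType.SQS, {
  parserConfig: {
    innerSchema: orderSchema,
    transformer: 'json', // body is a JSON string, so JSONStringified is applied
  },
});

const recordHandler = async (record: unknown) => {
  // With parsing configured, the record passed here has already been validated
  // against the extended SqsRecordSchema built by #createExtendedSchema.
  console.log(record);
};

export const handler: SQSHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, { context });
```

Passing a full `schema` instead of `innerSchema` skips the extension logic entirely and parses each record with that schema as-is, matching the first branch of the new `#parseRecord`.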