From d625ed222ce2ad547f08a3bb876ad369fcef4001 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Tue, 2 Sep 2025 19:42:11 +0100 Subject: [PATCH 01/19] Integrated Parser for the SQS event type --- package-lock.json | 3 + packages/batch/package.json | 12 + .../batch/src/BasePartialBatchProcessor.ts | 43 ++- packages/batch/src/BatchProcessor.ts | 2 +- packages/batch/src/types.ts | 7 + .../batch/tests/unit/BatchProcessor.test.ts | 287 +++++++++++++++++- 6 files changed, 348 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 21cd368f21..74c0fa3c40 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10733,6 +10733,9 @@ "name": "@aws-lambda-powertools/batch", "version": "2.25.2", "license": "MIT-0", + "dependencies": { + "@aws-lambda-powertools/parser": "2.25.2" + }, "devDependencies": { "@aws-lambda-powertools/testing-utils": "file:../testing" } diff --git a/packages/batch/package.json b/packages/batch/package.json index e91a4f2a30..4fad9c8fd1 100644 --- a/packages/batch/package.json +++ b/packages/batch/package.json @@ -74,5 +74,17 @@ ], "devDependencies": { "@aws-lambda-powertools/testing-utils": "file:../testing" + }, + "peerDependencies": { + "@aws-lambda-powertools/parser": "2.25.2", + "@standard-schema/spec": "^1.0.0" + }, + "peerDependenciesMeta": { + "@aws-lambda-powertools/parser": { + "optional": true + }, + "@standard-schema/spec": { + "optional": true + } } } diff --git a/packages/batch/src/BasePartialBatchProcessor.ts b/packages/batch/src/BasePartialBatchProcessor.ts index f4ac5ac6c9..bc1f170a78 100644 --- a/packages/batch/src/BasePartialBatchProcessor.ts +++ b/packages/batch/src/BasePartialBatchProcessor.ts @@ -1,3 +1,4 @@ +import type { StandardSchemaV1 } from '@standard-schema/spec'; import type { DynamoDBRecord, KinesisStreamRecord, @@ -11,6 +12,7 @@ import { } from './constants.js'; import { FullBatchFailureError } from './errors.js'; import type { + BasePartialBatchProcessorConfig, EventSourceDataClassTypes, PartialItemFailureResponse, PartialItemFailures, @@ -42,12 +44,20 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor { */ public eventType: keyof typeof EventType; + /** + * The schema of the body of the event record for parsing + */ + public schema?: StandardSchemaV1; + /** * Initializes base batch processing class * * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) */ - public constructor(eventType: keyof typeof EventType) { + public constructor( + eventType: keyof typeof EventType, + config?: BasePartialBatchProcessorConfig + ) { super(); this.eventType = eventType; this.batchResponse = DEFAULT_RESPONSE; @@ -56,6 +66,9 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor { [EventType.KinesisDataStreams]: () => this.collectKinesisFailures(), [EventType.DynamoDBStreams]: () => this.collectDynamoDBFailures(), }; + if (config) { + this.schema = config.schema; + } } /** @@ -194,10 +207,32 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor { * @param record The record to be processed * @param eventType The type of event to process */ - public toBatchType( + public async toBatchType( record: EventSourceDataClassTypes, - eventType: keyof typeof EventType - ): SQSRecord | KinesisStreamRecord | DynamoDBRecord { + eventType: keyof typeof EventType, + schema?: StandardSchemaV1 + ): Promise { + if (schema) { + const { parse } = await import('@aws-lambda-powertools/parser'); + if (eventType === EventType.SQS) { + try { + return parse(record, undefined, 
schema) as SQSRecord; + } catch (error) { + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(schema as any), + }); + return parse(record, undefined, extendedSchema); + } + } + throw new Error('Unsupported event type'); + } return DATA_CLASS_MAPPING[eventType](record); } } diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 4cff0ee8c9..6896b7bd0b 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -100,7 +100,7 @@ class BatchProcessor extends BasePartialBatchProcessor { record: BaseRecord ): Promise { try { - const data = this.toBatchType(record, this.eventType); + const data = await this.toBatchType(record, this.eventType, this.schema); const result = await this.handler(data, this.options?.context); return this.successHandler(record, result); diff --git a/packages/batch/src/types.ts b/packages/batch/src/types.ts index e94b811f6c..62820df23a 100644 --- a/packages/batch/src/types.ts +++ b/packages/batch/src/types.ts @@ -1,9 +1,11 @@ +import type { StandardSchemaV1 } from '@standard-schema/spec'; import type { Context, DynamoDBRecord, KinesisStreamRecord, SQSRecord, } from 'aws-lambda'; + import type { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js'; import type { SqsFifoPartialProcessor } from './SqsFifoPartialProcessor.js'; import type { SqsFifoPartialProcessorAsync } from './SqsFifoPartialProcessorAsync.js'; @@ -89,6 +91,10 @@ type PartialItemFailures = { itemIdentifier: string }; */ type PartialItemFailureResponse = { batchItemFailures: PartialItemFailures[] }; +type BasePartialBatchProcessorConfig = { + schema: StandardSchemaV1; +}; + export type { BatchProcessingOptions, BaseRecord, @@ -97,4 +103,5 @@ export type { FailureResponse, PartialItemFailures, PartialItemFailureResponse, + BasePartialBatchProcessorConfig, }; diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 650354d968..c505ea6045 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,6 +1,7 @@ import context from '@aws-lambda-powertools/testing-utils/context'; -import type { Context } from 'aws-lambda'; +import type { Context, SQSRecord } from 'aws-lambda'; import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; +import { z } from 'zod'; import { BatchProcessingError, BatchProcessor, @@ -286,4 +287,288 @@ describe('Class: AsyncBatchProcessor', () => { // Act & Assess expect(() => processor.processSync()).toThrowError(BatchProcessingError); }); + + describe('Batch processing with Parser Integration', () => { + describe('Passing Internal Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = 
sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + customObject1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 'invalid-age', + }; + const customObject2 = { + name: 20, + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); + + describe('Passing Extended Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = 
sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + customObject1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 'invalid-age', + }; + const customObject2 = { + name: 20, + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); + }); }); From 
4e69aec7a41bbce4eb345d4dcb997a1ce7e4e353 Mon Sep 17 00:00:00 2001
From: Swopnil Dangol
Date: Wed, 3 Sep 2025 09:10:46 +0100
Subject: [PATCH 02/19] Removed the parsing from the toBatchType function and created its own method

---
 .../batch/src/BasePartialBatchProcessor.ts | 28 ++---------
 packages/batch/src/BatchProcessor.ts       | 50 ++++++++++++++++++-
 2 files changed, 51 insertions(+), 27 deletions(-)

diff --git a/packages/batch/src/BasePartialBatchProcessor.ts b/packages/batch/src/BasePartialBatchProcessor.ts
index bc1f170a78..e7f824fed1 100644
--- a/packages/batch/src/BasePartialBatchProcessor.ts
+++ b/packages/batch/src/BasePartialBatchProcessor.ts
@@ -207,32 +207,10 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor {
    * @param record The record to be processed
    * @param eventType The type of event to process
    */
-  public async toBatchType(
+  public toBatchType(
     record: EventSourceDataClassTypes,
-    eventType: keyof typeof EventType,
-    schema?: StandardSchemaV1
-  ): Promise {
-    if (schema) {
-      const { parse } = await import('@aws-lambda-powertools/parser');
-      if (eventType === EventType.SQS) {
-        try {
-          return parse(record, undefined, schema) as SQSRecord;
-        } catch (error) {
-          const { JSONStringified } = await import(
-            '@aws-lambda-powertools/parser/helpers'
-          );
-          const { SqsRecordSchema } = await import(
-            '@aws-lambda-powertools/parser/schemas/sqs'
-          );
-          const extendedSchema = SqsRecordSchema.extend({
-            // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema
-            body: JSONStringified(schema as any),
-          });
-          return parse(record, undefined, extendedSchema);
-        }
-      }
-      throw new Error('Unsupported event type');
-    }
+    eventType: keyof typeof EventType
+  ): SQSRecord | KinesisStreamRecord | DynamoDBRecord {
     return DATA_CLASS_MAPPING[eventType](record);
   }
 }
diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts
index 4cff0ee8c9..6896b7bd0b 100644
--- a/packages/batch/src/BatchProcessor.ts
+++ b/packages/batch/src/BatchProcessor.ts
@@ -1,6 +1,18 @@
+import type { StandardSchemaV1 } from '@standard-schema/spec';
+import type {
+  DynamoDBRecord,
+  KinesisStreamRecord,
+  SQSRecord,
+} from 'aws-lambda';
 import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js';
+import { EventType } from './constants.js';
 import { BatchProcessingError } from './errors.js';
-import type { BaseRecord, FailureResponse, SuccessResponse } from './types.js';
+import type {
+  BaseRecord,
+  EventSourceDataClassTypes,
+  FailureResponse,
+  SuccessResponse,
+} from './types.js';
 
 /**
  * Process records in a batch asynchronously and handle partial failure cases.
@@ -100,7 +112,12 @@ class BatchProcessor extends BasePartialBatchProcessor {
     record: BaseRecord
   ): Promise {
     try {
-      const data = await this.toBatchType(record, this.eventType, this.schema);
+      const parsedRecord = await this.parseRecord(
+        record,
+        this.eventType,
+        this.schema
+      );
+      const data = this.toBatchType(parsedRecord, this.eventType);
       const result = await this.handler(data, this.options?.context);
 
       return this.successHandler(record, result);
@@ -121,6 +138,35 @@ class BatchProcessor extends BasePartialBatchProcessor {
       'Not implemented. Use asyncProcess() instead.'
); } + + public async parseRecord( + record: EventSourceDataClassTypes, + eventType: keyof typeof EventType, + schema?: StandardSchemaV1 + ): Promise { + if (schema) { + const { parse } = await import('@aws-lambda-powertools/parser'); + if (eventType === EventType.SQS) { + try { + return parse(record, undefined, schema) as SQSRecord; + } catch (error) { + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(schema as any), + }); + return parse(record, undefined, extendedSchema); + } + } + throw new Error('Unsupported event type'); + } + return record; + } } export { BatchProcessor }; From 0a60d4f108848a9062199bff6f19286387f96db1 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 09:23:41 +0100 Subject: [PATCH 03/19] Added test to check for invalid event type --- .../batch/tests/unit/BatchProcessor.test.ts | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index c505ea6045..5cbe154f5e 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -570,5 +570,44 @@ describe('Class: AsyncBatchProcessor', () => { ); }); }); + + it('completes processing with all failures if an unsupported event type is used for parsing', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + //@ts-expect-error + const processor = new BatchProcessor('invalid-event-type', { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); }); }); From d0b3d23e7b1eaf70f7c468493aa8a19922cf1fc3 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 09:27:44 +0100 Subject: [PATCH 04/19] Added documentation for the function --- packages/batch/src/BatchProcessor.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 99a2ee6d28..9b6080885c 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -139,6 +139,15 @@ class BatchProcessor extends BasePartialBatchProcessor { ); } + /** + * Parse the record according to the schema passed. + * + * If the schema is not provided, it returns the record as is. 
+ * + * @param record The record to be parsed + * @param eventType The type of event to process + * @param schema The StandardSchema to be used for parsing + */ public async parseRecord( record: EventSourceDataClassTypes, eventType: keyof typeof EventType, From d6d81f925f66b6958cb3a2b5b9c48ff8c84b63ed Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 10:10:07 +0100 Subject: [PATCH 05/19] Moved the build order of parser before the batch processor --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index decd90a37b..4a76e23aee 100644 --- a/package.json +++ b/package.json @@ -10,9 +10,9 @@ "packages/tracer", "packages/parameters", "packages/idempotency", + "packages/parser", "packages/batch", "packages/testing", - "packages/parser", "examples/snippets", "layers", "examples/app", From b9ef42ba7e03c91108846182e658707af939fe7c Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 12:42:24 +0100 Subject: [PATCH 06/19] Added condition to do the extended schema parsing only when a zod schema is used --- packages/batch/src/BatchProcessor.ts | 69 ++- packages/batch/src/constants.ts | 9 +- .../batch/tests/unit/BatchProcessor.test.ts | 559 ++++++++++-------- 3 files changed, 344 insertions(+), 293 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 9b6080885c..fd1a64842f 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -5,7 +5,7 @@ import type { SQSRecord, } from 'aws-lambda'; import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js'; -import { EventType } from './constants.js'; +import { EventType, SchemaType } from './constants.js'; import { BatchProcessingError } from './errors.js'; import type { BaseRecord, @@ -112,12 +112,11 @@ class BatchProcessor extends BasePartialBatchProcessor { record: BaseRecord ): Promise { try { - const parsedRecord = await this.parseRecord( - record, - this.eventType, - this.schema - ); - const data = this.toBatchType(parsedRecord, this.eventType); + const recordToProcess = + this.schema == null + ? record + : await this.parseRecord(record, this.eventType, this.schema); + const data = this.toBatchType(recordToProcess, this.eventType); const result = await this.handler(data, this.options?.context); return this.successHandler(record, result); @@ -142,39 +141,49 @@ class BatchProcessor extends BasePartialBatchProcessor { /** * Parse the record according to the schema passed. * - * If the schema is not provided, it returns the record as is. 
+ * If the passed schema is already an extended schema, + * it directly uses the schema to parse the record + * + * If the passed schema is an internal payload schema, + * it checks whether it is a zod schema and + * then extends the zod schema according to the passed event type for parsing * * @param record The record to be parsed * @param eventType The type of event to process * @param schema The StandardSchema to be used for parsing */ - public async parseRecord( + private async parseRecord( record: EventSourceDataClassTypes, eventType: keyof typeof EventType, - schema?: StandardSchemaV1 + schema: StandardSchemaV1 ): Promise { - if (schema) { - const { parse } = await import('@aws-lambda-powertools/parser'); - if (eventType === EventType.SQS) { - try { - return parse(record, undefined, schema) as SQSRecord; - } catch (error) { - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(schema as any), - }); - return parse(record, undefined, extendedSchema); - } + const { parse } = await import('@aws-lambda-powertools/parser'); + if (eventType === EventType.SQS) { + const extendedSchemaParsing = parse(record, undefined, schema, true); + if (extendedSchemaParsing.success) + return extendedSchemaParsing.data as SQSRecord; + if (schema['~standard'].vendor === SchemaType.Zod) { + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema + body: JSONStringified(schema as any), + }); + return parse(record, undefined, extendedSchema); } - throw new Error('Unsupported event type'); + console.warn( + 'The schema provided is not supported. Only Zod schemas are supported for extension.' + ); + throw new Error('Unsupported schema type'); } - return record; + console.warn( + `The event type provided is not supported. 
Supported events: ${Object.values(EventType).join(',')}` + ); + throw new Error('Unsupported event type'); } } diff --git a/packages/batch/src/constants.ts b/packages/batch/src/constants.ts index 159b12df6a..7e8ee07c9a 100644 --- a/packages/batch/src/constants.ts +++ b/packages/batch/src/constants.ts @@ -17,6 +17,13 @@ const EventType = { DynamoDBStreams: 'DynamoDBStreams', } as const; +/** + * Enum of supported schema types for the utility + */ +const SchemaType = { + Zod: 'zod', +} as const; + /** * Default response for the partial batch processor */ @@ -35,4 +42,4 @@ const DATA_CLASS_MAPPING = { record as DynamoDBRecord, }; -export { EventType, DEFAULT_RESPONSE, DATA_CLASS_MAPPING }; +export { EventType, SchemaType, DEFAULT_RESPONSE, DATA_CLASS_MAPPING }; diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 5cbe154f5e..9bf47b9734 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,5 +1,6 @@ import context from '@aws-lambda-powertools/testing-utils/context'; import type { Context, SQSRecord } from 'aws-lambda'; +import * as v from 'valibot'; import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { z } from 'zod'; import { @@ -288,309 +289,343 @@ describe('Class: AsyncBatchProcessor', () => { expect(() => processor.processSync()).toThrowError(BatchProcessingError); }); - describe('Batch processing with Parser Integration', () => { - describe('Passing Internal Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[0]).toStrictEqual([ - 'success', - customObject1, - firstRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], - }); + describe('Batch processing with Parser Integration: Passing Internal Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, }); - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - name: 
z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - const processedMessages = await processor.process(); + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + customObject1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], + }); + }); - // Assess - expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], - ]); + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, }); - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 'invalid-age', + }; + const customObject2 = { + name: 20, + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - // Assess - await 
expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, }); - }); - describe('Passing Extended Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - const processedMessages = await processor.process(); + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); - // Assess - expect(processedMessages[0]).toStrictEqual([ - 'success', - customObject1, - firstRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], - }); + it('completes processing with failures if an unsupported event type is used for parsing', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + //@ts-expect-error + const processor = new BatchProcessor('invalid-event-type', { + schema: customSchema, }); - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - 
}; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - const processedMessages = await processor.process(); + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); - // Assess - expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], - ]); + it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + // Prepare + const customSchema = v.object({ + name: v.string(), + age: v.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, }); - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), - }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + // Act + processor.register( + records, + async (customObject: SQSRecord) => { + return customObject.body; + }, + options + ); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + describe('Batch processing with Parser Integration: Passing Extended Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore 
lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + customObject1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], }); }); - it('completes processing with all failures if an unsupported event type is used for parsing', async () => { + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare const customSchema = z.object({ name: z.string(), age: z.number(), }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); const customObject1 = { name: 'test-1', age: 20, }; const customObject2 = { name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', age: 'invalid-age', }; + const customObject2 
= { + name: 20, + age: 30, + }; const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; - //@ts-expect-error - const processor = new BatchProcessor('invalid-event-type', { - schema: customSchema, + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, }); // Act From 8c1ad65915a5f842304506dc0003b2b26e7485c1 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 12:45:41 +0100 Subject: [PATCH 07/19] Moved the parser as a dev dependency --- packages/batch/package.json | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/packages/batch/package.json b/packages/batch/package.json index 4fad9c8fd1..42396f9918 100644 --- a/packages/batch/package.json +++ b/packages/batch/package.json @@ -73,18 +73,7 @@ "nodejs" ], "devDependencies": { - "@aws-lambda-powertools/testing-utils": "file:../testing" - }, - "peerDependencies": { - "@aws-lambda-powertools/parser": "2.25.2", - "@standard-schema/spec": "^1.0.0" - }, - "peerDependenciesMeta": { - "@aws-lambda-powertools/parser": { - "optional": true - }, - "@standard-schema/spec": { - "optional": true - } + "@aws-lambda-powertools/testing-utils": "file:../testing", + "@aws-lambda-powertools/parser": "2.25.2" } } From e7c8585da9eb0ae42dda6822db93095c71fa3831 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 12:50:49 +0100 Subject: [PATCH 08/19] Updated lock file --- package-lock.json | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 74c0fa3c40..2f87b15d82 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,9 +16,9 @@ "packages/tracer", "packages/parameters", "packages/idempotency", + "packages/parser", "packages/batch", "packages/testing", - "packages/parser", "examples/snippets", "layers", "examples/app", @@ -10733,10 +10733,8 @@ "name": "@aws-lambda-powertools/batch", "version": "2.25.2", "license": "MIT-0", - "dependencies": { - "@aws-lambda-powertools/parser": "2.25.2" - }, "devDependencies": { + "@aws-lambda-powertools/parser": "2.25.2", "@aws-lambda-powertools/testing-utils": "file:../testing" } }, From b30e3bedb6d07410f23f48765a7c7a289955549f Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 17:04:10 +0100 Subject: [PATCH 09/19] Integrated Parser for the DynamoDB event type --- packages/batch/src/BatchProcessor.ts | 31 ++ .../batch/tests/unit/BatchProcessor.test.ts | 339 +++++++++++++++++- 2 files changed, 367 insertions(+), 3 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index fd1a64842f..c472192550 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -1,8 +1,10 @@ import type { StandardSchemaV1 } from '@standard-schema/spec'; import type { + AttributeValue, DynamoDBRecord, KinesisStreamRecord, SQSRecord, + StreamRecord, } from 'aws-lambda'; import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js'; import { EventType, SchemaType } from './constants.js'; @@ -180,6 +182,35 @@ class BatchProcessor extends BasePartialBatchProcessor { ); throw new Error('Unsupported schema type'); } + if (eventType === EventType.DynamoDBStreams) { + const extendedSchemaParsing = parse(record, undefined, schema, true); + if (extendedSchemaParsing.success) + return extendedSchemaParsing.data as DynamoDBRecord; + if (schema['~standard'].vendor === 
SchemaType.Zod) { + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' + ); + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema + OldImage: DynamoDBMarshalled( + schema as any + ).optional(), + // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema + NewImage: DynamoDBMarshalled( + schema as any + ).optional(), + }), + }); + return parse(record, undefined, extendedSchema); + } + console.warn( + 'The schema provided is not supported. Only Zod schemas are supported for extension.' + ); + throw new Error('Unsupported schema type'); + } console.warn( `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}` ); diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 9bf47b9734..3eb935d5cb 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,5 +1,9 @@ +import { + DynamoDBStreamChangeRecordBase, + DynamoDBStreamRecord, +} from '@aws-lambda-powertools/parser/schemas/dynamodb'; import context from '@aws-lambda-powertools/testing-utils/context'; -import type { Context, SQSRecord } from 'aws-lambda'; +import type { Context, DynamoDBRecord, SQSRecord } from 'aws-lambda'; import * as v from 'valibot'; import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { z } from 'zod'; @@ -289,7 +293,7 @@ describe('Class: AsyncBatchProcessor', () => { expect(() => processor.processSync()).toThrowError(BatchProcessingError); }); - describe('Batch processing with Parser Integration: Passing Internal Schema', () => { + describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ @@ -490,7 +494,7 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Extended Schema', () => { + describe('Batch processing with Parser Integration: Passing Extended SQS Record Schema', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ @@ -645,4 +649,333 @@ describe('Class: AsyncBatchProcessor', () => { ); }); }); + + describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + Message: z.string(), + }); + const firstRecord = dynamodbRecordFactory('failure'); + const secondRecord = dynamodbRecordFactory('success'); + const thirdRecord = dynamodbRecordFactory('fail'); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncDynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + 
expect(processedMessages[1]).toStrictEqual(['success', '', secondRecord]); + expect(processor.failureMessages.length).toBe(2); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, + { itemIdentifier: thirdRecord.dynamodb?.SequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 'invalid-age', + }; + const customObject2 = { + name: 20, + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + + it('completes processing with failures if an unsupported event type is used for parsing', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + //@ts-expect-error + const processor = new BatchProcessor('invalid-event-type', { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + + it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + // Prepare + const customSchema = v.object({ + name: v.string(), + age: v.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = 
sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async (customObject: SQSRecord) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); + + describe('Batch processing with Parser Integration: Passing Extended DynamoDB Record Schema', () => { + it.only('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + Message: z.string(), + }); + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' + ); + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + OldImage: DynamoDBMarshalled(customSchema).optional(), + }), + }); + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); + const secondRecord = dynamodbRecordFactory('success'); + //@ts-expect-error Passing a number + const thirdRecord = dynamodbRecordFactory(2); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: extendedSchema, + }); + + // Act + processor.register(records, asyncDynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(2); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, + { itemIdentifier: thirdRecord.dynamodb?.SequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does 
not match the passed schema', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema + body: JSONStringified(customSchema as any), + }); + const customObject1 = { + name: 'test-1', + age: 'invalid-age', + }; + const customObject2 = { + name: 20, + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: extendedSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); }); From f248e9f8e0ce6ec0040ee7394b6a2ec12d78400d Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 21:27:07 +0100 Subject: [PATCH 10/19] Fixed the tests for DynamoDB record processing --- .../batch/tests/unit/BatchProcessor.test.ts | 242 +++++------------- 1 file changed, 63 insertions(+), 179 deletions(-) diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 3eb935d5cb..339fb81658 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -656,9 +656,10 @@ describe('Class: AsyncBatchProcessor', () => { const customSchema = z.object({ Message: z.string(), }); - const firstRecord = dynamodbRecordFactory('failure'); + + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); const secondRecord = dynamodbRecordFactory('success'); - const thirdRecord = dynamodbRecordFactory('fail'); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: customSchema, @@ -669,12 +670,15 @@ describe('Class: AsyncBatchProcessor', () => { const processedMessages = await processor.process(); // Assess - expect(processedMessages[1]).toStrictEqual(['success', '', secondRecord]); - expect(processor.failureMessages.length).toBe(2); + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(1); expect(processor.response()).toStrictEqual({ batchItemFailures: [ { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, - { itemIdentifier: thirdRecord.dynamodb?.SequenceNumber }, ], }); }); @@ -682,114 +686,44 @@ describe('Class: AsyncBatchProcessor', () => { it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare const customSchema = z.object({ - name: z.string(), - age: z.number(), + Message: z.string(), }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + 
+ const firstRecord = dynamodbRecordFactory('success'); + const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { + const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: customSchema, }); // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + processor.register(records, asyncDynamodbRecordHandler, options); const processedMessages = await processor.process(); // Assess expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], ]); }); it('completes processing with all failures if all the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, + Message: z.string(), }); - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - - it('completes processing with failures if an unsupported event type is used for parsing', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = dynamodbRecordFactory(2); const records = [firstRecord, secondRecord]; - //@ts-expect-error - const processor = new BatchProcessor('invalid-event-type', { + const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: customSchema, }); // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + processor.register(records, asyncDynamodbRecordHandler, options); // Assess await expect(processor.process()).rejects.toThrowError( @@ -800,32 +734,18 @@ describe('Class: AsyncBatchProcessor', () => { it('completes processing with failures if an unsupported schema type is used for parsing', async () => { // Prepare const customSchema = v.object({ - name: v.string(), - age: v.number(), + Message: v.string(), }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + + const firstRecord = dynamodbRecordFactory('success'); + const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, 
secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { + const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: customSchema, }); // Act - processor.register( - records, - async (customObject: SQSRecord) => { - return customObject.body; - }, - options - ); + processor.register(records, asyncDynamodbRecordHandler, options); // Assess await expect(processor.process()).rejects.toThrowError( @@ -835,7 +755,7 @@ describe('Class: AsyncBatchProcessor', () => { }); describe('Batch processing with Parser Integration: Passing Extended DynamoDB Record Schema', () => { - it.only('completes the processing with failures if some of the payload does not match the passed schema', async () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ Message: z.string(), @@ -847,14 +767,12 @@ describe('Class: AsyncBatchProcessor', () => { await import('@aws-lambda-powertools/parser/schemas/dynamodb'); const extendedSchema = DynamoDBStreamRecord.extend({ dynamodb: DynamoDBStreamChangeRecordBase.extend({ - OldImage: DynamoDBMarshalled(customSchema).optional(), + NewImage: DynamoDBMarshalled(customSchema).optional(), }), }); //@ts-expect-error Passing a number const firstRecord = dynamodbRecordFactory(1); const secondRecord = dynamodbRecordFactory('success'); - //@ts-expect-error Passing a number - const thirdRecord = dynamodbRecordFactory(2); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: extendedSchema, @@ -870,11 +788,10 @@ describe('Class: AsyncBatchProcessor', () => { 'success', secondRecord, ]); - expect(processor.failureMessages.length).toBe(2); + expect(processor.failureMessages.length).toBe(1); expect(processor.response()).toStrictEqual({ batchItemFailures: [ { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, - { itemIdentifier: thirdRecord.dynamodb?.SequenceNumber }, ], }); }); @@ -882,95 +799,62 @@ describe('Class: AsyncBatchProcessor', () => { it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare const customSchema = z.object({ - name: z.string(), - age: z.number(), + Message: z.string(), }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const firstRecord = dynamodbRecordFactory('success'); + const secondRecord = 
dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { + const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: extendedSchema, }); // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + processor.register(records, asyncDynamodbRecordHandler, options); const processedMessages = await processor.process(); // Assess expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], ]); }); it('completes processing with all failures if all the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ - name: z.string(), - age: z.number(), + Message: z.string(), }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = dynamodbRecordFactory(2); const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { + const processor = new BatchProcessor(EventType.DynamoDBStreams, { schema: extendedSchema, }); // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); + processor.register(records, asyncDynamodbRecordHandler, options); // Assess await expect(processor.process()).rejects.toThrowError( From 7120337f595b7356be37b501be5bebee8e63465e Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Wed, 3 Sep 2025 21:32:26 +0100 Subject: [PATCH 11/19] Removed unused imports and unused comments --- packages/batch/src/BatchProcessor.ts | 1 - packages/batch/tests/unit/BatchProcessor.test.ts | 15 ++++----------- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index c472192550..14267e87ea 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -1,6 +1,5 @@ import type { StandardSchemaV1 } from '@standard-schema/spec'; import type { - AttributeValue, DynamoDBRecord, KinesisStreamRecord, SQSRecord, diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts 
b/packages/batch/tests/unit/BatchProcessor.test.ts index 339fb81658..919e5f1c78 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,9 +1,5 @@ -import { - DynamoDBStreamChangeRecordBase, - DynamoDBStreamRecord, -} from '@aws-lambda-powertools/parser/schemas/dynamodb'; import context from '@aws-lambda-powertools/testing-utils/context'; -import type { Context, DynamoDBRecord, SQSRecord } from 'aws-lambda'; +import type { Context, SQSRecord } from 'aws-lambda'; import * as v from 'valibot'; import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { z } from 'zod'; @@ -508,8 +504,7 @@ describe('Class: AsyncBatchProcessor', () => { '@aws-lambda-powertools/parser/schemas/sqs' ); const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), + body: JSONStringified(customSchema), }); const customObject1 = { name: 'test-1', @@ -563,8 +558,7 @@ describe('Class: AsyncBatchProcessor', () => { '@aws-lambda-powertools/parser/schemas/sqs' ); const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), + body: JSONStringified(customSchema), }); const customObject1 = { name: 'test-1', @@ -613,8 +607,7 @@ describe('Class: AsyncBatchProcessor', () => { '@aws-lambda-powertools/parser/schemas/sqs' ); const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: at least for now, we need to broaden the type because the JSONstringified helper method is not typed with StandardSchemaV1 but with ZodSchema - body: JSONStringified(customSchema as any), + body: JSONStringified(customSchema), }); const customObject1 = { name: 'test-1', From b33832f837ab0e0d3232baf90e33381c34638433 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 12:21:43 +0100 Subject: [PATCH 12/19] Integrated Parser for the Kinesis event type --- packages/batch/src/BatchProcessor.ts | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 14267e87ea..650fc9ecc1 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -181,6 +181,29 @@ class BatchProcessor extends BasePartialBatchProcessor { ); throw new Error('Unsupported schema type'); } + if (eventType === EventType.KinesisDataStreams) { + const extendedSchemaParsing = parse(record, undefined, schema, true); + if (extendedSchemaParsing.success) + return extendedSchemaParsing.data as KinesisStreamRecord; + if (schema['~standard'].vendor === SchemaType.Zod) { + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema + data: Base64Encoded(schema as any), + }), + }); + return parse(record, 
undefined, extendedSchema); + } + console.warn( + 'The schema provided is not supported. Only Zod schemas are supported for extension.' + ); + throw new Error('Unsupported schema type'); + } if (eventType === EventType.DynamoDBStreams) { const extendedSchemaParsing = parse(record, undefined, schema, true); if (extendedSchemaParsing.success) From f1e8f7026f5fd12b7a73a87fa0b831f8977ad0a0 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 14:07:13 +0100 Subject: [PATCH 13/19] Added tests for parser integration with kinesis and some refactoring --- packages/batch/src/BatchProcessor.ts | 142 +++++++------ .../batch/tests/unit/BatchProcessor.test.ts | 196 ++++++++++++++++++ 2 files changed, 269 insertions(+), 69 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 650fc9ecc1..b9fce6a610 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -140,103 +140,107 @@ class BatchProcessor extends BasePartialBatchProcessor { } /** - * Parse the record according to the schema passed. - * - * If the passed schema is already an extended schema, - * it directly uses the schema to parse the record - * - * If the passed schema is an internal payload schema, - * it checks whether it is a zod schema and - * then extends the zod schema according to the passed event type for parsing + * Create an extended schema according to the event type passed. * - * @param record The record to be parsed - * @param eventType The type of event to process + * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) * @param schema The StandardSchema to be used for parsing */ - private async parseRecord( - record: EventSourceDataClassTypes, + private async createExtendedSchema( eventType: keyof typeof EventType, schema: StandardSchemaV1 - ): Promise { - const { parse } = await import('@aws-lambda-powertools/parser'); - if (eventType === EventType.SQS) { - const extendedSchemaParsing = parse(record, undefined, schema, true); - if (extendedSchemaParsing.success) - return extendedSchemaParsing.data as SQSRecord; - if (schema['~standard'].vendor === SchemaType.Zod) { - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema + ) { + switch (eventType) { + case EventType.SQS: { + const [{ JSONStringified }, { SqsRecordSchema }] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers'), + import('@aws-lambda-powertools/parser/schemas/sqs'), + ]); + return SqsRecordSchema.extend({ body: JSONStringified(schema as any), }); - return parse(record, undefined, extendedSchema); } - console.warn( - 'The schema provided is not supported. Only Zod schemas are supported for extension.' 
- ); - throw new Error('Unsupported schema type'); - } - if (eventType === EventType.KinesisDataStreams) { - const extendedSchemaParsing = parse(record, undefined, schema, true); - if (extendedSchemaParsing.success) - return extendedSchemaParsing.data as KinesisStreamRecord; - if (schema['~standard'].vendor === SchemaType.Zod) { - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ + case EventType.KinesisDataStreams: { + const [ + { Base64Encoded }, + { KinesisDataStreamRecord, KinesisDataStreamRecordPayload }, + ] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers'), + import('@aws-lambda-powertools/parser/schemas/kinesis'), + ]); + return KinesisDataStreamRecord.extend({ kinesis: KinesisDataStreamRecordPayload.extend({ - // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema data: Base64Encoded(schema as any), }), }); - return parse(record, undefined, extendedSchema); } - console.warn( - 'The schema provided is not supported. Only Zod schemas are supported for extension.' - ); - throw new Error('Unsupported schema type'); - } - if (eventType === EventType.DynamoDBStreams) { - const extendedSchemaParsing = parse(record, undefined, schema, true); - if (extendedSchemaParsing.success) - return extendedSchemaParsing.data as DynamoDBRecord; - if (schema['~standard'].vendor === SchemaType.Zod) { - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ + case EventType.DynamoDBStreams: { + const [ + { DynamoDBMarshalled }, + { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase }, + ] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers/dynamodb'), + import('@aws-lambda-powertools/parser/schemas/dynamodb'), + ]); + return DynamoDBStreamRecord.extend({ dynamodb: DynamoDBStreamChangeRecordBase.extend({ - // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema OldImage: DynamoDBMarshalled( schema as any ).optional(), - // biome-ignore lint/suspicious/noExplicitAny: The vendor field in the schema is verified that the schema is a Zod schema NewImage: DynamoDBMarshalled( schema as any ).optional(), }), }); - return parse(record, undefined, extendedSchema); } + default: + console.warn( + `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}` + ); + throw new Error('Unsupported event type'); + } + } + + /** + * Parse the record according to the schema passed. 
+ * + * If the passed schema is already an extended schema, + * it directly uses the schema to parse the record + * + * If the passed schema is an internal payload schema, + * it checks whether it is a zod schema and + * then extends the zod schema according to the passed event type for parsing + * + * @param record The record to be parsed + * @param eventType The type of event to process + * @param schema The StandardSchema to be used for parsing + */ + private async parseRecord( + record: EventSourceDataClassTypes, + eventType: keyof typeof EventType, + schema: StandardSchemaV1 + ): Promise { + const { parse } = await import('@aws-lambda-powertools/parser'); + // Try parsing with the original schema first + const extendedSchemaParsing = parse(record, undefined, schema, true); + if (extendedSchemaParsing.success) { + return extendedSchemaParsing.data as + | SQSRecord + | KinesisStreamRecord + | DynamoDBRecord; + } + // Only proceed with schema extension if it's a Zod schema + if (schema['~standard'].vendor !== SchemaType.Zod) { console.warn( 'The schema provided is not supported. Only Zod schemas are supported for extension.' ); throw new Error('Unsupported schema type'); } - console.warn( - `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}` - ); - throw new Error('Unsupported event type'); + // Handle schema extension based on event type + const extendedSchema = await this.createExtendedSchema(eventType, schema); + return parse(record, undefined, extendedSchema) as + | SQSRecord + | KinesisStreamRecord + | DynamoDBRecord; } } diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index 919e5f1c78..e7ca001c06 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -855,4 +855,200 @@ describe('Class: AsyncBatchProcessor', () => { ); }); }); + + describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.string(); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.kinesis.sequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.string(); + const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + 
expect(processedMessages).toStrictEqual([ + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.string(); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = kinesisRecordFactory(1); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + + it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + // Prepare + const customSchema = v.string(); + + const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); + + describe('Batch processing with Parser Integration: Passing Extended Kinesis Record Schema', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: extendedSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.kinesis.sequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); + const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + 
const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: extendedSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = kinesisRecordFactory(1); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: extendedSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + }); }); From ce3fc4f3132956ec5453950224022a3e3c52aef8 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 14:41:24 +0100 Subject: [PATCH 14/19] Fixed the SonarQube finding --- packages/batch/src/BatchProcessor.ts | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index b9fce6a610..2f9c57abe8 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -218,15 +218,12 @@ class BatchProcessor extends BasePartialBatchProcessor { record: EventSourceDataClassTypes, eventType: keyof typeof EventType, schema: StandardSchemaV1 - ): Promise { + ): Promise { const { parse } = await import('@aws-lambda-powertools/parser'); // Try parsing with the original schema first const extendedSchemaParsing = parse(record, undefined, schema, true); if (extendedSchemaParsing.success) { - return extendedSchemaParsing.data as - | SQSRecord - | KinesisStreamRecord - | DynamoDBRecord; + return extendedSchemaParsing.data as EventSourceDataClassTypes; } // Only proceed with schema extension if it's a Zod schema if (schema['~standard'].vendor !== SchemaType.Zod) { @@ -237,10 +234,11 @@ class BatchProcessor extends BasePartialBatchProcessor { } // Handle schema extension based on event type const extendedSchema = await this.createExtendedSchema(eventType, schema); - return parse(record, undefined, extendedSchema) as - | SQSRecord - | KinesisStreamRecord - | DynamoDBRecord; + return parse( + record, + undefined, + extendedSchema + ) as EventSourceDataClassTypes; } } From ee115dd858139c0cf4535ad8ad9ee13209a147b8 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 15:10:43 +0100 Subject: [PATCH 15/19] Marked schema property as protected --- packages/batch/src/BasePartialBatchProcessor.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/batch/src/BasePartialBatchProcessor.ts b/packages/batch/src/BasePartialBatchProcessor.ts index e7f824fed1..529a3870d9 100644 --- 
a/packages/batch/src/BasePartialBatchProcessor.ts +++ b/packages/batch/src/BasePartialBatchProcessor.ts @@ -47,7 +47,7 @@ abstract class BasePartialBatchProcessor extends BasePartialProcessor { /** * The schema of the body of the event record for parsing */ - public schema?: StandardSchemaV1; + protected schema?: StandardSchemaV1; /** * Initializes base batch processing class From f36d513f06dc50c3851454032a31460e6ae9ba5c Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 15:43:36 +0100 Subject: [PATCH 16/19] Added braces for the default block --- packages/batch/src/BatchProcessor.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index 2f9c57abe8..c83ebc881e 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -192,11 +192,12 @@ class BatchProcessor extends BasePartialBatchProcessor { }), }); } - default: + default: { console.warn( `The event type provided is not supported. Supported events: ${Object.values(EventType).join(',')}` ); throw new Error('Unsupported event type'); + } } } From 29da8ffa3ed9c768ac09b9ab8393a9b4c25fa5ee Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Thu, 4 Sep 2025 23:32:09 +0100 Subject: [PATCH 17/19] Implemented parsing with and without the transformers --- packages/batch/src/BatchProcessor.ts | 127 +++-- .../batch/tests/unit/BatchProcessor.test.ts | 530 +++++++++++++----- 2 files changed, 480 insertions(+), 177 deletions(-) diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index c83ebc881e..e5768dc403 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -142,56 +142,87 @@ class BatchProcessor extends BasePartialBatchProcessor { /** * Create an extended schema according to the event type passed. 
* + * If useTransformers is true, parsing with transformers + * else parse without transformers + * * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) * @param schema The StandardSchema to be used for parsing + * @param useTransformers Whether to use transformers for parsing */ private async createExtendedSchema( eventType: keyof typeof EventType, - schema: StandardSchemaV1 + schema: StandardSchemaV1, + useTransformers: boolean ) { switch (eventType) { case EventType.SQS: { - const [{ JSONStringified }, { SqsRecordSchema }] = await Promise.all([ - import('@aws-lambda-powertools/parser/helpers'), - import('@aws-lambda-powertools/parser/schemas/sqs'), - ]); - return SqsRecordSchema.extend({ - body: JSONStringified(schema as any), - }); + if (useTransformers) { + const [{ JSONStringified }, { SqsRecordSchema }] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers'), + import('@aws-lambda-powertools/parser/schemas/sqs'), + ]); + return SqsRecordSchema.extend({ + body: JSONStringified(schema as any), + }); + } + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + return SqsRecordSchema.extend({ body: schema }); } + case EventType.KinesisDataStreams: { - const [ - { Base64Encoded }, - { KinesisDataStreamRecord, KinesisDataStreamRecordPayload }, - ] = await Promise.all([ - import('@aws-lambda-powertools/parser/helpers'), - import('@aws-lambda-powertools/parser/schemas/kinesis'), - ]); + if (useTransformers) { + const [ + { Base64Encoded }, + { KinesisDataStreamRecord, KinesisDataStreamRecordPayload }, + ] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers'), + import('@aws-lambda-powertools/parser/schemas/kinesis'), + ]); + return KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(schema as any), + }), + }); + } + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); return KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(schema as any), - }), + kinesis: KinesisDataStreamRecordPayload.extend({ data: schema }), }); } + case EventType.DynamoDBStreams: { - const [ - { DynamoDBMarshalled }, - { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase }, - ] = await Promise.all([ - import('@aws-lambda-powertools/parser/helpers/dynamodb'), - import('@aws-lambda-powertools/parser/schemas/dynamodb'), - ]); + if (useTransformers) { + const [ + { DynamoDBMarshalled }, + { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase }, + ] = await Promise.all([ + import('@aws-lambda-powertools/parser/helpers/dynamodb'), + import('@aws-lambda-powertools/parser/schemas/dynamodb'), + ]); + return DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + OldImage: DynamoDBMarshalled( + schema as any + ).optional(), + NewImage: DynamoDBMarshalled( + schema as any + ).optional(), + }), + }); + } + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); return DynamoDBStreamRecord.extend({ dynamodb: DynamoDBStreamChangeRecordBase.extend({ - OldImage: DynamoDBMarshalled( - schema as any - ).optional(), - NewImage: DynamoDBMarshalled( - schema as any - ).optional(), + OldImage: (schema as any).optional(), + NewImage: (schema as any).optional(), }), }); } + default: { console.warn( `The event type provided is not supported. 
Supported events: ${Object.values(EventType).join(',')}` @@ -207,9 +238,7 @@ class BatchProcessor extends BasePartialBatchProcessor { * If the passed schema is already an extended schema, * it directly uses the schema to parse the record * - * If the passed schema is an internal payload schema, - * it checks whether it is a zod schema and - * then extends the zod schema according to the passed event type for parsing + * Only Zod Schemas are supported for automatic schema extension * * @param record The record to be parsed * @param eventType The type of event to process @@ -234,12 +263,30 @@ class BatchProcessor extends BasePartialBatchProcessor { throw new Error('Unsupported schema type'); } // Handle schema extension based on event type - const extendedSchema = await this.createExtendedSchema(eventType, schema); - return parse( - record, - undefined, - extendedSchema - ) as EventSourceDataClassTypes; + try { + // Try without transformers first, then with transformers + const extendedSchemaWithoutTransformers = await this.createExtendedSchema( + eventType, + schema, + false + ); + return parse( + record, + undefined, + extendedSchemaWithoutTransformers + ) as EventSourceDataClassTypes; + } catch { + const extendedSchemaWithTransformers = await this.createExtendedSchema( + eventType, + schema, + true + ); + return parse( + record, + undefined, + extendedSchemaWithTransformers + ) as EventSourceDataClassTypes; + } } } diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index e7ca001c06..c096004bcf 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -1,3 +1,8 @@ +import { + Base64Encoded, + JSONStringified, +} from '@aws-lambda-powertools/parser/helpers'; +import { DynamoDBMarshalled } from '@aws-lambda-powertools/parser/helpers/dynamodb'; import context from '@aws-lambda-powertools/testing-utils/context'; import type { Context, SQSRecord } from 'aws-lambda'; import * as v from 'valibot'; @@ -289,13 +294,97 @@ describe('Class: AsyncBatchProcessor', () => { expect(() => processor.processSync()).toThrowError(BatchProcessingError); }); - describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema', () => { + it('completes processing with failures if an unsupported event type is used for parsing', async () => { + // Prepare + const customSchema = z.object({ + name: z.string(), + age: z.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + //@ts-expect-error + const processor = new BatchProcessor('invalid-event-type', { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + + it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + // Prepare + const customSchema = v.object({ + name: v.string(), + age: v.number(), + }); + const customObject1 = { + name: 'test-1', + age: 20, + }; + const customObject2 = { + name: 'test-2', + age: 'invalid-age', + }; + const firstRecord = 
sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async (customObject: SQSRecord) => { + return customObject.body; + }, + options + ); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + + describe('Batch processing with Parser Integration: Passing Extended SQS Record Schema', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ name: z.string(), age: z.number(), }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + body: JSONStringified(customSchema), + }); const customObject1 = { name: 'test-1', age: 20, @@ -308,7 +397,7 @@ describe('Class: AsyncBatchProcessor', () => { const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -341,6 +430,15 @@ describe('Class: AsyncBatchProcessor', () => { name: z.string(), age: z.number(), }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + body: JSONStringified(customSchema), + }); const customObject1 = { name: 'test-1', age: 20, @@ -353,7 +451,7 @@ describe('Class: AsyncBatchProcessor', () => { const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -381,6 +479,15 @@ describe('Class: AsyncBatchProcessor', () => { name: z.string(), age: z.number(), }); + const { JSONStringified } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { SqsRecordSchema } = await import( + '@aws-lambda-powertools/parser/schemas/sqs' + ); + const extendedSchema = SqsRecordSchema.extend({ + body: JSONStringified(customSchema), + }); const customObject1 = { name: 'test-1', age: 'invalid-age', @@ -394,7 +501,7 @@ describe('Class: AsyncBatchProcessor', () => { const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -413,13 +520,17 @@ describe('Class: AsyncBatchProcessor', () => { FullBatchFailureError ); }); + }); - it('completes processing with failures if an unsupported event type is used for parsing', async () => { + describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema with transformers', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); + const customSchema = JSONStringified( + z.object({ + name: z.string(), + age: z.number(), + }) + ); const customObject1 = { name: 'test-1', age: 20, @@ -431,8 +542,7 @@ describe('Class: 
AsyncBatchProcessor', () => { const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); const records = [firstRecord, secondRecord]; - //@ts-expect-error - const processor = new BatchProcessor('invalid-event-type', { + const processor = new BatchProcessor(EventType.SQS, { schema: customSchema, }); @@ -446,29 +556,81 @@ describe('Class: AsyncBatchProcessor', () => { }, options ); + const processedMessages = await processor.process(); // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); + expect(processedMessages[0]).toStrictEqual([ + 'success', + customObject1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], + }); }); - it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare - const customSchema = v.object({ - name: v.string(), - age: v.number(), - }); + const customSchema = JSONStringified( + z.object({ + name: z.string(), + age: z.number(), + }) + ); const customObject1 = { name: 'test-1', age: 20, }; const customObject2 = { name: 'test-2', + age: 30, + }; + const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); + const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: customSchema, + }); + + // Act + processor.register( + records, + async ( + customObject: SQSRecord & { body: z.infer } + ) => { + return customObject.body; + }, + options + ); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', customObject1, firstRecord], + ['success', customObject2, secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = JSONStringified( + z.object({ + name: z.string(), + age: z.number(), + }) + ); + const customObject1 = { + name: 'test-1', age: 'invalid-age', }; + const customObject2 = { + name: 20, + age: 30, + }; const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); + const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { schema: customSchema, @@ -477,7 +639,9 @@ describe('Class: AsyncBatchProcessor', () => { // Act processor.register( records, - async (customObject: SQSRecord) => { + async ( + customObject: SQSRecord & { body: z.infer } + ) => { return customObject.body; }, options @@ -490,22 +654,13 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Extended SQS Record Schema', () => { + describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema without transformers', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ name: z.string(), age: z.number(), }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - 
'@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); const customObject1 = { name: 'test-1', age: 20, @@ -518,7 +673,7 @@ describe('Class: AsyncBatchProcessor', () => { const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -551,15 +706,6 @@ describe('Class: AsyncBatchProcessor', () => { name: z.string(), age: z.number(), }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); const customObject1 = { name: 'test-1', age: 20, @@ -572,7 +718,7 @@ describe('Class: AsyncBatchProcessor', () => { const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -600,15 +746,6 @@ describe('Class: AsyncBatchProcessor', () => { name: z.string(), age: z.number(), }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); const customObject1 = { name: 'test-1', age: 'invalid-age', @@ -622,7 +759,7 @@ describe('Class: AsyncBatchProcessor', () => { const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -643,19 +780,28 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema', () => { + describe('Batch processing with Parser Integration: Passing Extended DynamoDB Record Schema', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ Message: z.string(), }); - + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' + ); + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), + }); //@ts-expect-error Passing a number const firstRecord = dynamodbRecordFactory(1); const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -681,12 +827,21 @@ describe('Class: AsyncBatchProcessor', () => { const customSchema = z.object({ Message: z.string(), }); - + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' + ); + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: 
DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), + }); const firstRecord = dynamodbRecordFactory('success'); const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -705,14 +860,23 @@ describe('Class: AsyncBatchProcessor', () => { const customSchema = z.object({ Message: z.string(), }); - + const { DynamoDBMarshalled } = await import( + '@aws-lambda-powertools/parser/helpers/dynamodb' + ); + const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = + await import('@aws-lambda-powertools/parser/schemas/dynamodb'); + const extendedSchema = DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), + }); //@ts-expect-error Passing a number const firstRecord = dynamodbRecordFactory(1); //@ts-expect-error Passing a number const secondRecord = dynamodbRecordFactory(2); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -723,13 +887,51 @@ describe('Class: AsyncBatchProcessor', () => { FullBatchFailureError ); }); + }); - it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema with transformers', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare - const customSchema = v.object({ - Message: v.string(), + const customSchema = DynamoDBMarshalled( + z.object({ + Message: z.string(), + }) + ); + + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); + const secondRecord = dynamodbRecordFactory('success'); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: customSchema, }); + // Act + processor.register(records, asyncDynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = DynamoDBMarshalled( + z.object({ + Message: z.string(), + }) + ); + const firstRecord = dynamodbRecordFactory('success'); const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; @@ -737,6 +939,34 @@ describe('Class: AsyncBatchProcessor', () => { schema: customSchema, }); + // Act + processor.register(records, asyncDynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = DynamoDBMarshalled( + z.object({ + 
Message: z.string(), + }) + ); + + //@ts-expect-error Passing a number + const firstRecord = dynamodbRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = dynamodbRecordFactory(2); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: customSchema, + }); + // Act processor.register(records, asyncDynamodbRecordHandler, options); @@ -747,28 +977,19 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Extended DynamoDB Record Schema', () => { + describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema without transformers', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.object({ Message: z.string(), }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); + //@ts-expect-error Passing a number const firstRecord = dynamodbRecordFactory(1); const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -794,21 +1015,12 @@ describe('Class: AsyncBatchProcessor', () => { const customSchema = z.object({ Message: z.string(), }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); + const firstRecord = dynamodbRecordFactory('success'); const secondRecord = dynamodbRecordFactory('success'); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -827,23 +1039,14 @@ describe('Class: AsyncBatchProcessor', () => { const customSchema = z.object({ Message: z.string(), }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); + //@ts-expect-error Passing a number const firstRecord = dynamodbRecordFactory(1); //@ts-expect-error Passing a number const secondRecord = dynamodbRecordFactory(2); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -856,16 +1059,26 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema', () => { + describe('Batch processing with 
Parser Integration: Passing Extended Kinesis Record Schema', () => { it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); //@ts-expect-error Passing a number const firstRecord = kinesisRecordFactory(1); const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -889,11 +1102,21 @@ describe('Class: AsyncBatchProcessor', () => { it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -910,13 +1133,23 @@ describe('Class: AsyncBatchProcessor', () => { it('completes processing with all failures if all the payload does not match the passed schema', async () => { // Prepare const customSchema = z.string(); + const { Base64Encoded } = await import( + '@aws-lambda-powertools/parser/helpers' + ); + const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = + await import('@aws-lambda-powertools/parser/schemas/kinesis'); + const extendedSchema = KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }); //@ts-expect-error Passing a number const firstRecord = kinesisRecordFactory(1); //@ts-expect-error Passing a number const secondRecord = kinesisRecordFactory(1); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, + schema: extendedSchema, }); // Act @@ -927,11 +1160,41 @@ describe('Class: AsyncBatchProcessor', () => { FullBatchFailureError ); }); + }); - it('completes processing with failures if an unsupported schema type is used for parsing', async () => { + describe('Batch processing with Parser Integration: Passing Internal Kinesis Record Schema with transformers', () => { + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare - const customSchema = v.string(); + const customSchema = Base64Encoded(z.string()); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); + const records = [firstRecord, secondRecord]; + 
const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + // Assess + expect(processedMessages[1]).toStrictEqual([ + 'success', + 'success', + secondRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: firstRecord.kinesis.sequenceNumber }, + ], + }); + }); + + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const customSchema = Base64Encoded(z.string()); const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const records = [firstRecord, secondRecord]; @@ -939,6 +1202,29 @@ describe('Class: AsyncBatchProcessor', () => { schema: customSchema, }); + // Act + processor.register(records, asyncKinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', 'success', firstRecord], + ['success', 'success', secondRecord], + ]); + }); + + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const customSchema = Base64Encoded(z.string()); + //@ts-expect-error Passing a number + const firstRecord = kinesisRecordFactory(1); + //@ts-expect-error Passing a number + const secondRecord = kinesisRecordFactory(1); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: customSchema, + }); + // Act processor.register(records, asyncKinesisRecordHandler, options); @@ -949,26 +1235,16 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - describe('Batch processing with Parser Integration: Passing Extended Kinesis Record Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + describe('Batch processing with Parser Integration: Passing Internal Kinesis Record Schema without transformers', () => { + it.skip('completes the processing with failures if some of the payload does not match the passed schema', async () => { // Prepare const customSchema = z.string(); - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); //@ts-expect-error Passing a number const firstRecord = kinesisRecordFactory(1); const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -989,24 +1265,14 @@ describe('Class: AsyncBatchProcessor', () => { }); }); - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + it.skip('completes the processing with no failures and parses the payload before passing to the record handler', async () => { // Prepare const customSchema = z.string(); - const { Base64Encoded } = await import( - 
'@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act @@ -1023,23 +1289,13 @@ describe('Class: AsyncBatchProcessor', () => { it('completes processing with all failures if all the payload does not match the passed schema', async () => { // Prepare const customSchema = z.string(); - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); //@ts-expect-error Passing a number const firstRecord = kinesisRecordFactory(1); //@ts-expect-error Passing a number const secondRecord = kinesisRecordFactory(1); const records = [firstRecord, secondRecord]; const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, + schema: customSchema, }); // Act From 4f38337c61540b53eac9146aa9fb3b9382e85527 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Fri, 5 Sep 2025 10:38:09 +0100 Subject: [PATCH 18/19] Refactored the tests to reduce duplicate code --- .../batch/tests/unit/BatchProcessor.test.ts | 1242 ++++------------- 1 file changed, 289 insertions(+), 953 deletions(-) diff --git a/packages/batch/tests/unit/BatchProcessor.test.ts b/packages/batch/tests/unit/BatchProcessor.test.ts index c096004bcf..020038b3fc 100644 --- a/packages/batch/tests/unit/BatchProcessor.test.ts +++ b/packages/batch/tests/unit/BatchProcessor.test.ts @@ -3,8 +3,22 @@ import { JSONStringified, } from '@aws-lambda-powertools/parser/helpers'; import { DynamoDBMarshalled } from '@aws-lambda-powertools/parser/helpers/dynamodb'; +import { + KinesisDataStreamRecord, + SqsRecordSchema, +} from '@aws-lambda-powertools/parser/schemas'; +import { + DynamoDBStreamChangeRecordBase, + DynamoDBStreamRecord, +} from '@aws-lambda-powertools/parser/schemas/dynamodb'; +import { KinesisDataStreamRecordPayload } from '@aws-lambda-powertools/parser/schemas/kinesis'; import context from '@aws-lambda-powertools/testing-utils/context'; -import type { Context, SQSRecord } from 'aws-lambda'; +import type { + Context, + DynamoDBRecord, + KinesisStreamRecord, + SQSRecord, +} from 'aws-lambda'; import * as v from 'valibot'; import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'; import { z } from 'zod'; @@ -294,1012 +308,334 @@ describe('Class: AsyncBatchProcessor', () => { expect(() => processor.processSync()).toThrowError(BatchProcessingError); }); - it('completes processing with failures if an unsupported event type is used for parsing', async () => { - // Prepare + describe('Batch processing with Parser Integration', () => { const customSchema = z.object({ - name: z.string(), - age: z.number(), + Message: z.string(), }); - const customObject1 = { - name: 'test-1', - age: 20, + const successPayload1 = { + 
Message: 'test-1', }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', + const successPayload2 = { + Message: 'test-2', }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - //@ts-expect-error - const processor = new BatchProcessor('invalid-event-type', { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; - }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - - it('completes processing with failures if an unsupported schema type is used for parsing', async () => { - // Prepare - const customSchema = v.object({ - name: v.string(), - age: v.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, + const failurePayload1 = { + Message: 1, }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', + const failurePayload2 = { + Message: 2, }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async (customObject: SQSRecord) => { - return customObject.body; - }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - - describe('Batch processing with Parser Integration: Passing Extended SQS Record Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + const sqsRecordHandler = async (parsedRecord: SQSRecord) => { + return parsedRecord.body; + }; + const kinesisRecordHandler = async (parsedRecord: KinesisStreamRecord) => { + return parsedRecord.kinesis.data; + }; + const dynamodbRecordHandler = async (parsedRecord: DynamoDBRecord) => { + return parsedRecord.dynamodb?.NewImage; + }; + const cases = [ + { + description: 'passing Extended Schema', + SQS: { + schema: SqsRecordSchema.extend({ + body: JSONStringified(customSchema), + }), }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[0]).toStrictEqual([ - 'success', - customObject1, - firstRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - 
batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + Kinesis: { + schema: KinesisDataStreamRecord.extend({ + kinesis: KinesisDataStreamRecordPayload.extend({ + data: Base64Encoded(customSchema).optional(), + }), + }), }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const { JSONStringified } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { SqsRecordSchema } = await import( - '@aws-lambda-powertools/parser/schemas/sqs' - ); - const extendedSchema = SqsRecordSchema.extend({ - body: JSONStringified(customSchema), - }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: extendedSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + DynamoDB: { + schema: DynamoDBStreamRecord.extend({ + dynamodb: DynamoDBStreamChangeRecordBase.extend({ + NewImage: DynamoDBMarshalled(customSchema).optional(), + }), + }), }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema with transformers', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = JSONStringified( - z.object({ - name: z.string(), - age: z.number(), - }) - ); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new 
BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + }, + { + description: 'passing Internal Schema without transformers', + SQS: { + schema: customSchema, }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[0]).toStrictEqual([ - 'success', - customObject1, - firstRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = JSONStringified( - z.object({ - name: z.string(), - age: z.number(), - }) - ); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + Kinesis: { + schema: customSchema, }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = JSONStringified( - z.object({ - name: z.string(), - age: z.number(), - }) - ); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + DynamoDB: { + schema: customSchema, }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Internal SQS Record Schema without transformers', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 'invalid-age', - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + }, + { + description: 'passing 
Internal Schema with transformers', + SQS: { + schema: JSONStringified(customSchema), }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[0]).toStrictEqual([ - 'success', - customObject1, - firstRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 20, - }; - const customObject2 = { - name: 'test-2', - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + Kinesis: { + schema: Base64Encoded(customSchema), }, - options - ); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', customObject1, firstRecord], - ['success', customObject2, secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - name: z.string(), - age: z.number(), - }); - const customObject1 = { - name: 'test-1', - age: 'invalid-age', - }; - const customObject2 = { - name: 20, - age: 30, - }; - const firstRecord = sqsRecordFactory(JSON.stringify(customObject1)); - const secondRecord = sqsRecordFactory(JSON.stringify(customObject2)); - - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.SQS, { - schema: customSchema, - }); - - // Act - processor.register( - records, - async ( - customObject: SQSRecord & { body: z.infer } - ) => { - return customObject.body; + DynamoDB: { + schema: DynamoDBMarshalled(customSchema), }, - options - ); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Extended DynamoDB Record Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - 
- // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [ - { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, - ], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); - const firstRecord = dynamodbRecordFactory('success'); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); - const { DynamoDBMarshalled } = await import( - '@aws-lambda-powertools/parser/helpers/dynamodb' - ); - const { DynamoDBStreamRecord, DynamoDBStreamChangeRecordBase } = - await import('@aws-lambda-powertools/parser/schemas/dynamodb'); - const extendedSchema = DynamoDBStreamRecord.extend({ - dynamodb: DynamoDBStreamChangeRecordBase.extend({ - NewImage: DynamoDBMarshalled(customSchema).optional(), - }), - }); - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = dynamodbRecordFactory(2); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: extendedSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema with transformers', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = DynamoDBMarshalled( - z.object({ - Message: z.string(), - }) - ); - - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - 
batchItemFailures: [ - { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, - ], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = DynamoDBMarshalled( - z.object({ - Message: z.string(), - }) - ); - - const firstRecord = dynamodbRecordFactory('success'); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = DynamoDBMarshalled( - z.object({ - Message: z.string(), - }) - ); - - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = dynamodbRecordFactory(2); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Internal DynamoDB Record Schema without transformers', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); - - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [ - { itemIdentifier: firstRecord.dynamodb?.SequenceNumber }, - ], - }); - }); - - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); - - const firstRecord = dynamodbRecordFactory('success'); - const secondRecord = dynamodbRecordFactory('success'); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); + }, + ]; + describe.each(cases)('SQS Record Schema $description', ({ SQS }) => { + it('completes the processing with no failures and parses the payload before 
passing to the record handler', async () => { + // Prepare + const firstRecord = sqsRecordFactory(JSON.stringify(successPayload1)); + const secondRecord = sqsRecordFactory(JSON.stringify(successPayload2)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: SQS.schema, + }); - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.object({ - Message: z.string(), - }); + // Act + processor.register(records, sqsRecordHandler, options); + const processedMessages = await processor.process(); - //@ts-expect-error Passing a number - const firstRecord = dynamodbRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = dynamodbRecordFactory(2); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.DynamoDBStreams, { - schema: customSchema, + // Assess + expect(processedMessages).toStrictEqual([ + ['success', successPayload1, firstRecord], + ['success', successPayload2, secondRecord], + ]); }); - // Act - processor.register(records, asyncDynamodbRecordHandler, options); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); - }); - }); - - describe('Batch processing with Parser Integration: Passing Extended Kinesis Record Schema', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.string(); - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, - }); + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const firstRecord = sqsRecordFactory(JSON.stringify(successPayload1)); + const secondRecord = sqsRecordFactory(JSON.stringify(failurePayload1)); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: SQS.schema, + }); - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); + // Act + processor.register(records, sqsRecordHandler, options); + const processedMessages = await processor.process(); - // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [ - { itemIdentifier: firstRecord.kinesis.sequenceNumber }, - ], + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + successPayload1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [{ itemIdentifier: secondRecord.messageId }], + }); }); - }); - it('completes the processing with no 
failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = z.string(); - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); - const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, - }); + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const firstRecord = sqsRecordFactory(JSON.stringify(failurePayload1)); + const secondRecord = sqsRecordFactory(JSON.stringify(failurePayload2)); - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.SQS, { + schema: SQS.schema, + }); - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); + // Act + processor.register(records, sqsRecordHandler, options); - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.string(); - const { Base64Encoded } = await import( - '@aws-lambda-powertools/parser/helpers' - ); - const { KinesisDataStreamRecord, KinesisDataStreamRecordPayload } = - await import('@aws-lambda-powertools/parser/schemas/kinesis'); - const extendedSchema = KinesisDataStreamRecord.extend({ - kinesis: KinesisDataStreamRecordPayload.extend({ - data: Base64Encoded(customSchema).optional(), - }), - }); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = kinesisRecordFactory(1); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: extendedSchema, + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); }); - - // Act - processor.register(records, asyncKinesisRecordHandler, options); - - // Assess - await expect(processor.process()).rejects.toThrowError( - FullBatchFailureError - ); }); - }); - describe('Batch processing with Parser Integration: Passing Internal Kinesis Record Schema with transformers', () => { - it('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = Base64Encoded(z.string()); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, - }); + describe.each(cases)( + 'Kinesis Record Schema $description', + ({ Kinesis }) => { + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const firstRecord = 
kinesisRecordFactory( + Buffer.from(JSON.stringify(successPayload1)).toString('base64') + ); + const secondRecord = kinesisRecordFactory( + Buffer.from(JSON.stringify(successPayload2)).toString('base64') + ); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: Kinesis.schema, + }); + + // Act + processor.register(records, kinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', successPayload1, firstRecord], + ['success', successPayload2, secondRecord], + ]); + }); - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const firstRecord = kinesisRecordFactory( + Buffer.from(JSON.stringify(successPayload1)).toString('base64') + ); + const secondRecord = kinesisRecordFactory( + Buffer.from(JSON.stringify(failurePayload1)).toString('base64') + ); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: Kinesis.schema, + }); + + // Act + processor.register(records, kinesisRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + successPayload1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: secondRecord.kinesis.sequenceNumber }, + ], + }); + }); - // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [ - { itemIdentifier: firstRecord.kinesis.sequenceNumber }, - ], - }); - }); + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + const firstRecord = kinesisRecordFactory( + Buffer.from(JSON.stringify(failurePayload1)).toString('base64') + ); + const secondRecord = kinesisRecordFactory( + Buffer.from(JSON.stringify(failurePayload2)).toString('base64') + ); + + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.KinesisDataStreams, { + schema: Kinesis.schema, + }); + + // Act + processor.register(records, sqsRecordHandler, options); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + } + ); - it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { - // Prepare - const customSchema = Base64Encoded(z.string()); - const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, - }); + describe.each(cases)( + 'DynamoDB Record Schema $description', + ({ DynamoDB }) => { + it('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + // Prepare + const firstRecord = dynamodbRecordFactory(successPayload1.Message); + const secondRecord = dynamodbRecordFactory(successPayload2.Message); + 
const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: DynamoDB.schema, + }); + + // Act + processor.register(records, dynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages).toStrictEqual([ + ['success', successPayload1, firstRecord], + ['success', successPayload2, secondRecord], + ]); + }); - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); + it('completes the processing with failures if some of the payload does not match the passed schema', async () => { + // Prepare + const firstRecord = dynamodbRecordFactory(successPayload1.Message); + //@ts-expect-error Passing an invalid payload for testing + const secondRecord = dynamodbRecordFactory(failurePayload1.Message); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: DynamoDB.schema, + }); + + // Act + processor.register(records, dynamodbRecordHandler, options); + const processedMessages = await processor.process(); + + // Assess + expect(processedMessages[0]).toStrictEqual([ + 'success', + successPayload1, + firstRecord, + ]); + expect(processor.failureMessages.length).toBe(1); + expect(processor.response()).toStrictEqual({ + batchItemFailures: [ + { itemIdentifier: secondRecord.dynamodb?.SequenceNumber }, + ], + }); + }); - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); + it('completes processing with all failures if all the payload does not match the passed schema', async () => { + // Prepare + //@ts-expect-error Passing an invalid payload for testing + const firstRecord = dynamodbRecordFactory(failurePayload1.Message); + //@ts-expect-error Passing an invalid payload for testing + const secondRecord = dynamodbRecordFactory(failurePayload2.Message); + const records = [firstRecord, secondRecord]; + const processor = new BatchProcessor(EventType.DynamoDBStreams, { + schema: DynamoDB.schema, + }); + + // Act + processor.register(records, dynamodbRecordHandler, options); + + // Assess + await expect(processor.process()).rejects.toThrowError( + FullBatchFailureError + ); + }); + } + ); - it('completes processing with all failures if all the payload does not match the passed schema', async () => { + it('completes processing with all failures if an unsupported event type is used for parsing', async () => { // Prepare - const customSchema = Base64Encoded(z.string()); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = kinesisRecordFactory(1); + const firstRecord = sqsRecordFactory(JSON.stringify(successPayload1)); + const secondRecord = sqsRecordFactory(JSON.stringify(successPayload2)); const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { + //@ts-expect-error + const processor = new BatchProcessor('invalid-event-type', { schema: customSchema, }); // Act - processor.register(records, asyncKinesisRecordHandler, options); + processor.register(records, sqsRecordHandler, options); // Assess await expect(processor.process()).rejects.toThrowError( FullBatchFailureError ); }); - }); - - describe('Batch processing with Parser Integration: Passing Internal Kinesis Record Schema without transformers', () => { - 
it.skip('completes the processing with failures if some of the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.string(); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, - }); - - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages[1]).toStrictEqual([ - 'success', - 'success', - secondRecord, - ]); - expect(processor.failureMessages.length).toBe(1); - expect(processor.response()).toStrictEqual({ - batchItemFailures: [ - { itemIdentifier: firstRecord.kinesis.sequenceNumber }, - ], - }); - }); - it.skip('completes the processing with no failures and parses the payload before passing to the record handler', async () => { + it('completes processing with failures if an unsupported schema type is used for parsing', async () => { // Prepare - const customSchema = z.string(); - const firstRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const secondRecord = kinesisRecordFactory('c3VjY2Vzcw=='); - const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, + const unsupportedSchema = v.object({ + Message: v.string(), }); - - // Act - processor.register(records, asyncKinesisRecordHandler, options); - const processedMessages = await processor.process(); - - // Assess - expect(processedMessages).toStrictEqual([ - ['success', 'success', firstRecord], - ['success', 'success', secondRecord], - ]); - }); - - it('completes processing with all failures if all the payload does not match the passed schema', async () => { - // Prepare - const customSchema = z.string(); - //@ts-expect-error Passing a number - const firstRecord = kinesisRecordFactory(1); - //@ts-expect-error Passing a number - const secondRecord = kinesisRecordFactory(1); + const firstRecord = sqsRecordFactory(JSON.stringify(successPayload1)); + const secondRecord = sqsRecordFactory(JSON.stringify(successPayload2)); const records = [firstRecord, secondRecord]; - const processor = new BatchProcessor(EventType.KinesisDataStreams, { - schema: customSchema, + const processor = new BatchProcessor(EventType.SQS, { + schema: unsupportedSchema, }); // Act - processor.register(records, asyncKinesisRecordHandler, options); + processor.register(records, sqsRecordHandler, options); // Assess await expect(processor.process()).rejects.toThrowError( From b314fb7ee5eed98d67eec63fcade2cfa7f3cbb77 Mon Sep 17 00:00:00 2001 From: Swopnil Dangol Date: Mon, 8 Sep 2025 08:27:44 +0100 Subject: [PATCH 19/19] Fixed formatting and documentation issues --- packages/batch/src/BasePartialProcessor.ts | 18 ++-- packages/batch/src/BatchProcessor.ts | 108 +++++++++++---------- packages/batch/src/constants.ts | 6 +- packages/batch/src/types.ts | 11 +++ 4 files changed, 78 insertions(+), 65 deletions(-) diff --git a/packages/batch/src/BasePartialProcessor.ts b/packages/batch/src/BasePartialProcessor.ts index 1640cc4355..23b7cdc9cc 100644 --- a/packages/batch/src/BasePartialProcessor.ts +++ b/packages/batch/src/BasePartialProcessor.ts @@ -74,8 +74,8 @@ abstract class BasePartialProcessor { * This method should be called when a record fails processing so that * the processor can keep track of the error and 
the record that failed. * - * @param record Record that failed processing - * @param error Error that was thrown + * @param record - Record that failed processing + * @param error - Error that was thrown */ public failureHandler( record: EventSourceDataClassTypes, @@ -131,7 +131,7 @@ abstract class BasePartialProcessor { * This is to ensure that the processor keeps track of the results and the records * that succeeded and failed processing. * - * @param record Record to be processed + * @param record - Record to be processed */ public abstract processRecord( record: BaseRecord @@ -149,7 +149,7 @@ abstract class BasePartialProcessor { * This is to ensure that the processor keeps track of the results and the records * that succeeded and failed processing. * - * @param record Record to be processed + * @param record - Record to be processed */ public abstract processRecordSync( record: BaseRecord @@ -198,9 +198,9 @@ abstract class BasePartialProcessor { * to allow for reusing the processor instance across multiple invocations * by instantiating the processor outside of the Lambda function handler. * - * @param records Array of records to be processed - * @param handler CallableFunction to process each record from the batch - * @param options Options to be used during processing (optional) + * @param records - Array of records to be processed + * @param handler - CallableFunction to process each record from the batch + * @param options - Options to be used during processing (optional) */ public register( records: BaseRecord[], @@ -223,8 +223,8 @@ abstract class BasePartialProcessor { * This method should be called when a record succeeds processing so that * the processor can keep track of the result and the record that succeeded. * - * @param record Record that succeeded processing - * @param result Result from record handler + * @param record - Record that succeeded processing + * @param result - Result from record handler */ public successHandler( record: EventSourceDataClassTypes, diff --git a/packages/batch/src/BatchProcessor.ts b/packages/batch/src/BatchProcessor.ts index e5768dc403..f6221e9e64 100644 --- a/packages/batch/src/BatchProcessor.ts +++ b/packages/batch/src/BatchProcessor.ts @@ -1,12 +1,7 @@ import type { StandardSchemaV1 } from '@standard-schema/spec'; -import type { - DynamoDBRecord, - KinesisStreamRecord, - SQSRecord, - StreamRecord, -} from 'aws-lambda'; +import type { StreamRecord } from 'aws-lambda'; import { BasePartialBatchProcessor } from './BasePartialBatchProcessor.js'; -import { EventType, SchemaType } from './constants.js'; +import { EventType, SchemaVendor } from './constants.js'; import { BatchProcessingError } from './errors.js'; import type { BaseRecord, @@ -92,7 +87,7 @@ import type { * }); * ``` * - * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) + * @param eventType - The type of event to process (SQS, Kinesis, DynamoDB) */ class BatchProcessor extends BasePartialBatchProcessor { /** @@ -107,7 +102,7 @@ class BatchProcessor extends BasePartialBatchProcessor { * If the handler function completes successfully, the method returns a success response. * Otherwise, it returns a failure response with the error that occurred during processing. * - * @param record The record to be processed + * @param record - The record to be processed */ public async processRecord( record: BaseRecord @@ -116,7 +111,7 @@ class BatchProcessor extends BasePartialBatchProcessor { const recordToProcess = this.schema == null ? 
record - : await this.parseRecord(record, this.eventType, this.schema); + : await this.#parseRecord(record, this.eventType, this.schema); const data = this.toBatchType(recordToProcess, this.eventType); const result = await this.handler(data, this.options?.context); @@ -129,7 +124,7 @@ class BatchProcessor extends BasePartialBatchProcessor { /** * @throws {BatchProcessingError} This method is not implemented for synchronous processing. * - * @param _record The record to be processed + * @param _record - The record to be processed */ public processRecordSync( _record: BaseRecord @@ -140,20 +135,21 @@ class BatchProcessor extends BasePartialBatchProcessor { } /** - * Create an extended schema according to the event type passed. + * Extend the schema according to the event type passed. * - * If useTransformers is true, parsing with transformers - * else parse without transformers + * If useTransformers is true, extend using opinionated transformers. + * Otherwise, extend without any transformers. * - * @param eventType The type of event to process (SQS, Kinesis, DynamoDB) - * @param schema The StandardSchema to be used for parsing - * @param useTransformers Whether to use transformers for parsing + * @param eventType - The type of event to process (SQS, Kinesis, DynamoDB) + * @param schema - The StandardSchema to be used for parsing + * @param useTransformers - Whether to use transformers for parsing */ - private async createExtendedSchema( - eventType: keyof typeof EventType, - schema: StandardSchemaV1, - useTransformers: boolean - ) { + async #createExtendedSchema(options: { + eventType: keyof typeof EventType; + schema: StandardSchemaV1; + useTransformers: boolean; + }) { + const { eventType, schema, useTransformers } = options; switch (eventType) { case EventType.SQS: { if (useTransformers) { @@ -233,18 +229,18 @@ class BatchProcessor extends BasePartialBatchProcessor { } /** - * Parse the record according to the schema passed. + * Parse the record according to the schema and event type passed. * * If the passed schema is already an extended schema, - * it directly uses the schema to parse the record + * use the schema directly to parse the record. * - * Only Zod Schemas are supported for automatic schema extension + * Only Zod Schemas are supported for schema extension. * - * @param record The record to be parsed - * @param eventType The type of event to process - * @param schema The StandardSchema to be used for parsing + * @param record - The record to be parsed + * @param eventType - The type of event to process + * @param schema - The StandardSchema to be used for parsing */ - private async parseRecord( + async #parseRecord( record: EventSourceDataClassTypes, eventType: keyof typeof EventType, schema: StandardSchemaV1 @@ -256,37 +252,43 @@ class BatchProcessor extends BasePartialBatchProcessor { return extendedSchemaParsing.data as EventSourceDataClassTypes; } // Only proceed with schema extension if it's a Zod schema - if (schema['~standard'].vendor !== SchemaType.Zod) { + if (schema['~standard'].vendor !== SchemaVendor.Zod) { console.warn( 'The schema provided is not supported. Only Zod schemas are supported for extension.' 
      );
       throw new Error('Unsupported schema type');
     }
 
     // Handle schema extension based on event type
-    try {
-      // Try without transformers first, then with transformers
-      const extendedSchemaWithoutTransformers = await this.createExtendedSchema(
-        eventType,
-        schema,
-        false
-      );
-      return parse(
-        record,
-        undefined,
-        extendedSchemaWithoutTransformers
-      ) as EventSourceDataClassTypes;
-    } catch {
-      const extendedSchemaWithTransformers = await this.createExtendedSchema(
-        eventType,
-        schema,
-        true
-      );
-      return parse(
-        record,
-        undefined,
-        extendedSchemaWithTransformers
-      ) as EventSourceDataClassTypes;
+    // Try without transformers first, then with transformers
+    const schemaWithoutTransformers = await this.#createExtendedSchema({
+      eventType,
+      schema,
+      useTransformers: false,
+    });
+    const schemaWithoutTransformersParsing = parse(
+      record,
+      undefined,
+      schemaWithoutTransformers,
+      true
+    );
+    if (schemaWithoutTransformersParsing.success) {
+      return schemaWithoutTransformersParsing.data as EventSourceDataClassTypes;
+    }
+    const schemaWithTransformers = await this.#createExtendedSchema({
+      eventType,
+      schema,
+      useTransformers: true,
+    });
+    const schemaWithTransformersParsing = parse(
+      record,
+      undefined,
+      schemaWithTransformers,
+      true
+    );
+    if (schemaWithTransformersParsing.success) {
+      return schemaWithTransformersParsing.data as EventSourceDataClassTypes;
     }
+    throw new Error('Failed to parse record');
   }
 }
 
diff --git a/packages/batch/src/constants.ts b/packages/batch/src/constants.ts
index 7e8ee07c9a..f6827774ef 100644
--- a/packages/batch/src/constants.ts
+++ b/packages/batch/src/constants.ts
@@ -18,9 +18,9 @@ const EventType = {
 } as const;
 
 /**
- * Enum of supported schema types for the utility
+ * Enum of supported schema vendors for the utility
  */
-const SchemaType = {
+const SchemaVendor = {
   Zod: 'zod',
 } as const;
 
@@ -42,4 +42,4 @@ const DATA_CLASS_MAPPING = {
     record as DynamoDBRecord,
 };
 
-export { EventType, SchemaType, DEFAULT_RESPONSE, DATA_CLASS_MAPPING };
+export { EventType, SchemaVendor, DEFAULT_RESPONSE, DATA_CLASS_MAPPING };
diff --git a/packages/batch/src/types.ts b/packages/batch/src/types.ts
index 62820df23a..b5781c44c8 100644
--- a/packages/batch/src/types.ts
+++ b/packages/batch/src/types.ts
@@ -91,7 +91,18 @@ type PartialItemFailures = { itemIdentifier: string };
  */
 type PartialItemFailureResponse = { batchItemFailures: PartialItemFailures[] };
 
+/**
+ * Type representing the configuration options passed to the BasePartialBatchProcessor class.
+ *
+ * @property schema - The schema to be used for parsing
+ */
 type BasePartialBatchProcessorConfig = {
+  /**
+   * The schema can be one of the following:
+   * 1. An internal schema of the payload of one of the supported event types.
+   * 2. An internal schema wrapped with one of the helper transformers (e.g. JSONStringified or DynamoDBMarshalled).
+   * 3. An extended schema of one of the supported event types.
+   */
   schema: StandardSchemaV1;
 };
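
For reference, below is a minimal usage sketch of the `schema` option added by this patch series, modelled on the shapes exercised in the tests above. The payload schema and handler names are illustrative, it assumes the parser package is installed as the optional peer dependency declared in package.json, and it relies on the existing processPartialResponse helper from the batch package; it is a sketch, not part of the patch.

import { BatchProcessor, EventType, processPartialResponse } from '@aws-lambda-powertools/batch';
import type { Context, SQSEvent, SQSRecord } from 'aws-lambda';
import { z } from 'zod';

// Inner payload schema for the SQS message body; per this patch, BatchProcessor
// extends it to a full SQS record schema (wrapping the body with JSONStringified)
// before each record is handled.
const customSchema = z.object({
  Message: z.string(),
});

const processor = new BatchProcessor(EventType.SQS, {
  schema: customSchema,
});

// By the time the handler runs, the record body has already been parsed and
// validated; records that fail validation are reported as partial batch item failures.
const recordHandler = async (record: SQSRecord) => {
  return record.body;
};

export const handler = async (event: SQSEvent, context: Context) =>
  processPartialResponse(event, recordHandler, processor, { context });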