
Commit d7879b9

Added docs for parser integration with batch processing
1 parent 2303c7b

13 files changed: +582 -35 lines changed

docs/features/batch.md

Lines changed: 160 additions & 35 deletions
Large diffs are not rendered by default.
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
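This snippet shows the extended-schema approach for DynamoDB Streams: the built-in DynamoDBStreamRecord schema is extended so that NewImage is unmarshalled and validated with the DynamoDBMarshalled helper, and the record handler is typed directly from the extended schema via z.infer.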
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { DynamoDBMarshalled } from '@aws-lambda-powertools/parser/helpers/dynamodb';
import {
  DynamoDBStreamChangeRecordBase,
  DynamoDBStreamRecord,
} from '@aws-lambda-powertools/parser/schemas/dynamodb';
import type { DynamoDBStreamHandler } from 'aws-lambda';
import { z } from 'zod';

const customSchema = DynamoDBStreamRecord.extend({
  dynamodb: DynamoDBStreamChangeRecordBase.extend({
    NewImage: DynamoDBMarshalled(
      z.object({
        name: z.string(),
        age: z.number(),
      })
    ),
  }),
});

const processor = new BatchProcessor(EventType.DynamoDBStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  dynamodb: {
    NewImage: { name, age },
  },
}: z.infer<typeof customSchema>) => {
  // this is safe to use because it's parsed
};

export const handler: DynamoDBStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
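
For reference, a hypothetical NewImage as delivered on the stream is shown below (illustrative values, not part of this commit). DynamoDBMarshalled unmarshals DynamoDB's AttributeValue encoding before the inner Zod schema runs:

const newImage = {
  name: { S: 'John Doe' }, // strings are marshalled as { S: ... }
  age: { N: '30' }, // numbers are marshalled as strings under { N: ... }
};
// after unmarshalling and parsing: { name: 'John Doe', age: 30 }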
Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
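Here only the payload schema is passed to the processor: DynamoDBMarshalled wraps the inner Zod object, and the record handler's parameter type intersects DynamoDBRecord with the inferred payload under dynamodb.NewImage.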
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { DynamoDBMarshalled } from '@aws-lambda-powertools/parser/helpers/dynamodb';
import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
import { z } from 'zod';

const customSchema = DynamoDBMarshalled(
  z.object({
    name: z.string(),
    age: z.number(),
  })
);

const processor = new BatchProcessor(EventType.DynamoDBStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  dynamodb: {
    NewImage: { name, age },
  },
}: DynamoDBRecord & {
  dynamodb: { NewImage: z.infer<typeof customSchema> };
}) => {
  // this is safe to use because it's parsed
};

export const handler: DynamoDBStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
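The same DynamoDB Streams example with a plain Zod object as the schema, without the DynamoDBMarshalled helper.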
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type { DynamoDBRecord, DynamoDBStreamHandler } from 'aws-lambda';
import { z } from 'zod';

const customSchema = z.object({
  name: z.string(),
  age: z.number(),
});

const processor = new BatchProcessor(EventType.DynamoDBStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  dynamodb: {
    NewImage: { name, age },
  },
}: DynamoDBRecord & {
  dynamodb: { NewImage: z.infer<typeof customSchema> };
}) => {
  // this is safe to use because it's parsed
};

export const handler: DynamoDBStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
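The Kinesis equivalent of the extended-schema approach: KinesisDataStreamRecord is extended so that the base64-encoded data payload is decoded and validated with the Base64Encoded helper.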
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Base64Encoded } from '@aws-lambda-powertools/parser/helpers';
import {
  KinesisDataStreamRecord,
  KinesisDataStreamRecordPayload,
} from '@aws-lambda-powertools/parser/schemas/kinesis';
import type { KinesisStreamHandler } from 'aws-lambda';
import { z } from 'zod';

const customSchema = KinesisDataStreamRecord.extend({
  kinesis: KinesisDataStreamRecordPayload.extend({
    data: Base64Encoded(
      z.object({
        name: z.string(),
        age: z.number(),
      })
    ),
  }),
});

const processor = new BatchProcessor(EventType.KinesisDataStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  kinesis: {
    data: { name, age },
  },
}: z.infer<typeof customSchema>) => {
  // this is safe to use because it's parsed
};

export const handler: KinesisStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
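
For reference, Kinesis delivers the record payload base64-encoded; the Base64Encoded helper decodes it before the inner Zod schema runs. A minimal sketch with hypothetical values (not part of this commit):

// the raw `kinesis.data` value the schema above receives
const encoded = Buffer.from(
  JSON.stringify({ name: 'John Doe', age: 30 })
).toString('base64');
// => 'eyJuYW1lIjoiSm9obiBEb2UiLCJhZ2UiOjMwfQ=='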
Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
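Here only the Base64Encoded payload schema is passed to the processor; the record handler's parameter type intersects KinesisStreamRecord with the inferred payload under kinesis.data.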
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { Base64Encoded } from '@aws-lambda-powertools/parser/helpers';
import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
import { z } from 'zod';

const customSchema = Base64Encoded(
  z.object({
    name: z.string(),
    age: z.number(),
  })
);

const processor = new BatchProcessor(EventType.KinesisDataStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  kinesis: {
    data: { name, age },
  },
}: KinesisStreamRecord & {
  kinesis: { data: z.infer<typeof customSchema> };
}) => {
  // this is safe to use because it's parsed
};

export const handler: KinesisStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
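The same Kinesis example with a plain Zod object as the schema, without the Base64Encoded helper.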
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type { KinesisStreamHandler, KinesisStreamRecord } from 'aws-lambda';
import { z } from 'zod';

const customSchema = z.object({
  name: z.string(),
  age: z.number(),
});

const processor = new BatchProcessor(EventType.KinesisDataStreams, {
  schema: customSchema,
});

const recordHandler = async ({
  kinesis: {
    data: { name, age },
  },
}: KinesisStreamRecord & {
  kinesis: { data: z.infer<typeof customSchema> };
}) => {
  // this is safe to use because it's parsed
};

export const handler: KinesisStreamHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
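The SQS variant of the extended-schema approach: SqsRecordSchema is extended so that the JSON-stringified message body is parsed and validated with the JSONStringified helper.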
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { JSONStringified } from '@aws-lambda-powertools/parser/helpers';
import { SqsRecordSchema } from '@aws-lambda-powertools/parser/schemas';
import type { SQSHandler } from 'aws-lambda';
import { z } from 'zod';

const customSchema = SqsRecordSchema.extend({
  body: JSONStringified(
    z.object({
      name: z.string(),
      age: z.number(),
    })
  ),
});

const processor = new BatchProcessor(EventType.SQS, { schema: customSchema });

const recordHandler = async ({
  body: { name, age },
}: z.infer<typeof customSchema>) => {
  // this is safe to use because it's parsed
};

export const handler: SQSHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
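
For reference, an SQS message body arrives as a JSON string; the JSONStringified helper parses it before the inner Zod schema runs. A minimal sketch with hypothetical values (not part of this commit):

// the raw `body` value the schema above receives
const body = JSON.stringify({ name: 'John Doe', age: 30 });
// => '{"name":"John Doe","age":30}'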
Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
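Here only the JSONStringified body schema is passed to the processor; the record handler's parameter type intersects SQSRecord with the inferred payload under body.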
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import { JSONStringified } from '@aws-lambda-powertools/parser/helpers';
import type { SQSHandler, SQSRecord } from 'aws-lambda';
import { z } from 'zod';

const customSchema = JSONStringified(
  z.object({
    name: z.string(),
    age: z.number(),
  })
);

const processor = new BatchProcessor(EventType.SQS, { schema: customSchema });

const recordHandler = async ({
  body: { name, age },
}: SQSRecord & { body: z.infer<typeof customSchema> }) => {
  // this is safe to use because it's parsed
};

export const handler: SQSHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
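The same SQS example with a plain Zod object as the schema, without the JSONStringified helper.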
import {
  BatchProcessor,
  EventType,
  processPartialResponse,
} from '@aws-lambda-powertools/batch';
import type { SQSHandler, SQSRecord } from 'aws-lambda';
import { z } from 'zod';

const customSchema = z.object({
  name: z.string(),
  age: z.number(),
});

const processor = new BatchProcessor(EventType.SQS, { schema: customSchema });

const recordHandler = async ({
  body: { name, age },
}: SQSRecord & { body: z.infer<typeof customSchema> }) => {
  // this is safe to use because it's parsed
};

export const handler: SQSHandler = async (event, context) =>
  processPartialResponse(event, recordHandler, processor, {
    context,
  });
