Skip to content

Commit 5a02285

Browse files
authored
SDK maxAttempts now configurable through env, plugin logs now debug (#310)
* Double chunk DDB batch writes to not overwhelm DDB on load
1 parent 3a5755f commit 5a02285

File tree

2 files changed

+37
-16
lines changed

2 files changed

+37
-16
lines changed

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,8 @@ Vercel link: https://open-next.vercel.app
5454

5555
### Environment variables
5656

57+
- AWS_SDK_DYNAMODB_MAX_ATTEMPTS: The maximum number of times requests that encounter retryable failures should be attempted for DynamoDB. Defaults to 3.
58+
- AWS_SDK_S3_MAX_ATTEMPTS: The maximum number of times requests that encounter retryable failures should be attempted for S3. Defaults to 3.
5759
- DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY: The number of concurrent batch write commands to DynamoDB. Defaults to 4 in an effort to leave plenty of DynamoDB write request capacity for the production load.
5860

5961
## Contribute

packages/open-next/src/adapters/server-adapter.ts

Lines changed: 35 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,20 @@
1-
/* eslint-disable unused-imports/no-unused-imports */
2-
import { DynamoDBClient } from "@aws-sdk/client-dynamodb";
3-
import { S3Client } from "@aws-sdk/client-s3";
1+
import { DynamoDBClient, DynamoDBClientConfig } from "@aws-sdk/client-dynamodb";
2+
import { S3Client, S3ClientConfig } from "@aws-sdk/client-s3";
43

5-
// We load every config here so that they are only loaded once
6-
// and during cold starts
74
import { BuildId } from "./config/index.js";
85
import { awsLogger } from "./logger.js";
96
import { lambdaHandler } from "./plugins/lambdaHandler.js";
107
import { setNodeEnv } from "./util.js";
118

9+
// We load every config here so that they are only loaded once
10+
// and during cold starts
1211
setNodeEnv();
1312
setBuildIdEnv();
1413
setNextjsServerWorkingDirectory();
1514

16-
///////////////////////
17-
// AWS global client //
18-
///////////////////////
15+
////////////////////////
16+
// AWS global clients //
17+
////////////////////////
1918

2019
declare global {
2120
var S3Client: S3Client;
@@ -24,20 +23,40 @@ declare global {
2423

2524
const CACHE_BUCKET_REGION = process.env.CACHE_BUCKET_REGION;
2625

26+
function parseS3ClientConfigFromEnv(): S3ClientConfig {
27+
return {
28+
region: CACHE_BUCKET_REGION,
29+
logger: awsLogger,
30+
maxAttempts: parseNumberFromEnv(process.env.AWS_SDK_S3_MAX_ATTEMPTS),
31+
};
32+
}
33+
34+
function parseDynamoClientConfigFromEnv(): DynamoDBClientConfig {
35+
return {
36+
region: CACHE_BUCKET_REGION,
37+
logger: awsLogger,
38+
maxAttempts: parseNumberFromEnv(process.env.AWS_SDK_DYNAMODB_MAX_ATTEMPTS),
39+
};
40+
}
41+
42+
function parseNumberFromEnv(envValue: string | undefined): number | undefined {
43+
if (typeof envValue !== "string") {
44+
return envValue;
45+
}
46+
47+
const parsedValue = parseInt(envValue);
48+
49+
return isNaN(parsedValue) ? undefined : parsedValue;
50+
}
51+
2752
// Cache clients using global variables
2853
// Note: The clients are used in `cache.ts`. The incremental cache is recreated on
2954
// every request and required on every request (And the require cache is also
3055
// cleared). It was causing some file to stay open which after enough time
3156
// would cause the function to crash with error "EMFILE too many open". It
3257
// was also making the memory grow out of control.
33-
globalThis.S3Client = new S3Client({
34-
region: CACHE_BUCKET_REGION,
35-
logger: awsLogger,
36-
});
37-
globalThis.dynamoClient = new DynamoDBClient({
38-
region: CACHE_BUCKET_REGION,
39-
logger: awsLogger,
40-
});
// Instantiate the shared AWS clients once per cold start and pin them on
// globalThis, so the per-request cache module can reuse them instead of
// creating new clients (and leaking file handles) on every request.
globalThis.S3Client = new S3Client(parseS3ClientConfigFromEnv());
globalThis.dynamoClient = new DynamoDBClient(parseDynamoClientConfigFromEnv());
4160

4261
/////////////
4362
// Handler //

0 commit comments

Comments
 (0)