diff --git a/README.md b/README.md
index 0153a827..bbcf344c 100644
--- a/README.md
+++ b/README.md
@@ -50,6 +50,12 @@ AWS link: https://d1gwt3w78t4dm3.cloudfront.net
 
 Vercel link: https://open-next.vercel.app
 
+## Configuration
+
+### Environment variables
+
+- DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY: The number of concurrent batch write commands to DynamoDB. Defaults to 4 in an effort to leave plenty of DynamoDB write request capacity for the production load.
+
 ## Contribute
 
 To run `OpenNext` locally:
diff --git a/packages/open-next/src/adapters/constants.ts b/packages/open-next/src/adapters/constants.ts
index 35793022..fe188699 100644
--- a/packages/open-next/src/adapters/constants.ts
+++ b/packages/open-next/src/adapters/constants.ts
@@ -1 +1,25 @@
 export const MAX_DYNAMO_BATCH_WRITE_ITEM_COUNT = 25;
+
+/**
+ * Sending to dynamo X commands at a time, using about X * 25 write units per batch to not overwhelm DDB
+ * and give production plenty of room to work with. With DDB Response times, you can expect about 10 batches per second.
+ */
+const DEFAULT_DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY = 4;
+
+export const getDynamoBatchWriteCommandConcurrency = (): number => {
+  const dynamoBatchWriteCommandConcurrencyFromEnv =
+    process.env.DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY;
+  const parsedDynamoBatchWriteCommandConcurrencyFromEnv =
+    dynamoBatchWriteCommandConcurrencyFromEnv
+      ? parseInt(dynamoBatchWriteCommandConcurrencyFromEnv)
+      : undefined;
+
+  if (
+    parsedDynamoBatchWriteCommandConcurrencyFromEnv &&
+    !isNaN(parsedDynamoBatchWriteCommandConcurrencyFromEnv)
+  ) {
+    return parsedDynamoBatchWriteCommandConcurrencyFromEnv;
+  }
+
+  return DEFAULT_DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY;
+};
diff --git a/packages/open-next/src/adapters/dynamo-provider.ts b/packages/open-next/src/adapters/dynamo-provider.ts
index 5e740f4b..d2e677c5 100644
--- a/packages/open-next/src/adapters/dynamo-provider.ts
+++ b/packages/open-next/src/adapters/dynamo-provider.ts
@@ -5,7 +5,10 @@
 import { CdkCustomResourceEvent, CdkCustomResourceResponse } from "aws-lambda";
 import { readFileSync } from "fs";
 
-import { MAX_DYNAMO_BATCH_WRITE_ITEM_COUNT } from "./constants.js";
+import {
+  getDynamoBatchWriteCommandConcurrency,
+  MAX_DYNAMO_BATCH_WRITE_ITEM_COUNT,
+} from "./constants.js";
 import { chunk } from "./util.js";
 
 const PHYSICAL_RESOURCE_ID = "dynamodb-cache";
@@ -36,12 +39,6 @@
   }
 }
 
-/**
- * Sending to dynamo X commands at a time, using about X * 25 write units per batch to not overwhelm DDB
- * and give production plenty of room to work with. With DDB Response times, you can expect about 10 batches per second.
- */
-const DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY = 4;
-
 async function insert(): Promise<void> {
   const tableName = process.env.CACHE_DYNAMO_TABLE!;
 
@@ -65,7 +62,7 @@ async function insert(): Promise<void> {
 
   const paramsChunks = chunk(
     batchWriteParamsArray,
-    DYNAMO_BATCH_WRITE_COMMAND_CONCURRENCY,
+    getDynamoBatchWriteCommandConcurrency(),
   );
 
   for (const paramsChunk of paramsChunks) {