From 4854e6c4ecef2a6f85872b5770d76c56ae129e29 Mon Sep 17 00:00:00 2001
From: Boris Lepikhin
Date: Fri, 19 Jul 2024 17:34:43 -0700
Subject: [PATCH] Chunk deleted files into batches

S3's batch delete (DeleteObjects) accepts at most 1000 keys per request.
This PR chunks `keysToDelete` into batches of 1000 keys so that syncs
deleting more than 1000 files stay within that limit.
---
 src/utils/s3-sync.ts | 21 ++++++++++++++++-----
 1 file changed, 16 insertions(+), 5 deletions(-)

diff --git a/src/utils/s3-sync.ts b/src/utils/s3-sync.ts
index 2bdbc177..8f5a7646 100644
--- a/src/utils/s3-sync.ts
+++ b/src/utils/s3-sync.ts
@@ -76,13 +76,24 @@ export async function s3Sync({
     const targetKeys = filesToUpload.map((file) =>
         targetPathPrefix !== undefined ? path.posix.join(targetPathPrefix, file) : file
     );
+
     const keysToDelete = findKeysToDelete(Object.keys(existingS3Objects), targetKeys);
     if (keysToDelete.length > 0) {
-        keysToDelete.map((key) => {
-            getUtils().log.verbose(`Deleting ${key}`);
-            fileChangeCount++;
-        });
-        await s3Delete(aws, bucketName, keysToDelete);
+        const batch = chunk(keysToDelete, 1000);
+
+        await Promise.all(
+            batch.map(async (keysToDeleteChunk, index) => {
+                getUtils().log.verbose(`Deleting chunk #${index}`);
+
+                keysToDeleteChunk.map((key) => {
+                    getUtils().log.verbose(`Deleting ${key}`);
+                    fileChangeCount++;
+                });
+
+                await s3Delete(aws, bucketName, keysToDeleteChunk);
+            })
+        );
+
         hasChanges = true;
     }
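
For context, a minimal standalone sketch of the batching strategy this patch applies. It assumes `s3Delete(aws, bucketName, keys)` is the project's existing helper that issues one delete request for the given keys (signature assumed), and it uses a hand-rolled stand-in for the lodash-style `chunk(array, size)` the patch calls; the `deleteKeysInBatches` wrapper is hypothetical and only illustrates the idea, it is not the actual module.

```ts
// Illustration only: how keys get split into S3-sized delete batches.
// `s3Delete` is assumed to match the helper used in s3-sync.ts (signature assumed).
declare function s3Delete(aws: unknown, bucketName: string, keys: string[]): Promise<void>;

// Stand-in for a lodash-style chunk(array, size): split `items` into slices of `size`.
function chunk<T>(items: T[], size: number): T[][] {
    const result: T[][] = [];
    for (let i = 0; i < items.length; i += size) {
        result.push(items.slice(i, i + size));
    }
    return result;
}

async function deleteKeysInBatches(
    aws: unknown,
    bucketName: string,
    keysToDelete: string[]
): Promise<void> {
    // S3 deletes at most 1000 keys per request, so split the key list first.
    const batches = chunk(keysToDelete, 1000);

    // As in the patch, the batches are deleted concurrently with Promise.all;
    // each batch maps to one s3Delete call and stays under the 1000-key limit.
    await Promise.all(batches.map((batch) => s3Delete(aws, bucketName, batch)));
}
```

Note that firing all batches through `Promise.all` deletes them concurrently; for very large key sets, awaiting the batches sequentially or capping concurrency would be the more conservative choice.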