Created
June 20, 2024 01:06
-
-
Save Tombarr/9f866b9ffde2005d850292739d91750d to your computer and use it in GitHub Desktop.
AWS SDK v3 - S3 Multipart Upload
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const { | |
CreateMultipartUploadCommand, | |
UploadPartCommand, | |
CompleteMultipartUploadCommand, | |
AbortMultipartUploadCommand, | |
S3Client, | |
} = require('@aws-sdk/client-s3'); | |
const path = require('path'); | |
const fs = require('fs'); | |
const crypto = require('crypto'); | |
// Part size for the multipart upload. S3 requires every part except the
// last to be at least 5 MiB; 25 MiB keeps the part count low for large files.
const twentyFiveMB = 25 * 1024 * 1024;
// Content type applied to the created S3 object (set on CreateMultipartUpload).
const ContentType = 'application/sql';
// Destination bucket — NOTE(review): hard-coded; presumably replaced per deployment.
const BUCKET = 'my-aws-bucket';
// Shared S3 client, pinned to us-east-1.
const S3 = new S3Client({
  region: 'us-east-1',
});
/**
 * Uploads a file to S3 via the multipart-upload API, streaming one part at a
 * time to bound memory use, with SHA-256 integrity checking on every part and
 * on the completed object.
 *
 * NOTE(review): this function uses `export` (ESM) while the file's imports use
 * `require` (CJS) — confirm the module system before shipping.
 *
 * @param {string} filePath - Path of the local file to upload. The S3 key is
 *   the file's basename.
 * @returns {Promise<import('@aws-sdk/client-s3').CompleteMultipartUploadCommandOutput>}
 *   The CompleteMultipartUpload response.
 * @throws Rethrows any SDK/stream error after aborting the in-flight
 *   multipart upload (previously errors were silently swallowed).
 */
export async function uploadDatabase(filePath) {
  const fileName = path.basename(filePath);
  let uploadId;
  let readStream;
  try {
    const multipartUpload = await S3.send(
      new CreateMultipartUploadCommand({
        Bucket: BUCKET,
        Key: fileName,
        ChecksumAlgorithm: 'SHA256',
        ContentType,
      }),
    );
    uploadId = multipartUpload.UploadId;
    const uploadResults = [];
    // Raw (binary) SHA-256 digest of each part, needed later for the
    // composite object checksum.
    const partDigests = [];
    // Process one chunk at a time to avoid OutOfMemoryError.
    readStream = fs.createReadStream(filePath, { highWaterMark: twentyFiveMB });
    let partNumber = 0;
    for await (const chunk of readStream) {
      const digest = crypto.createHash('sha256').update(chunk).digest();
      partDigests.push(digest);
      partNumber += 1;
      uploadResults.push(
        await S3.send(
          new UploadPartCommand({
            Bucket: BUCKET,
            Key: fileName,
            UploadId: uploadId,
            Body: chunk,
            ChecksumSHA256: digest.toString('base64'),
            PartNumber: partNumber,
            // ContentType removed: it is not a valid UploadPartCommand
            // parameter (it is set once at CreateMultipartUpload).
          }),
        ),
      );
    }
    // FIX for "BadDigest: The sha256 you specified did not match the
    // calculated checksum": for a multipart upload, S3 does NOT expect the
    // SHA-256 of the whole file. It expects the *composite* checksum — the
    // SHA-256 of the concatenated raw part digests, base64-encoded, suffixed
    // with "-<partCount>".
    const compositeChecksum = `${crypto
      .createHash('sha256')
      .update(Buffer.concat(partDigests))
      .digest('base64')}-${partNumber}`;
    return await S3.send(
      new CompleteMultipartUploadCommand({
        Bucket: BUCKET,
        Key: fileName,
        UploadId: uploadId,
        ChecksumSHA256: compositeChecksum,
        MultipartUpload: {
          Parts: uploadResults.map(({ ETag, ChecksumSHA256 }, i) => ({
            ETag,
            ChecksumSHA256,
            PartNumber: i + 1,
          })),
        },
      }),
    );
  } catch (err) {
    // Abort so S3 does not keep billing for orphaned parts, then rethrow —
    // the original version swallowed the error and resolved with undefined.
    if (uploadId) {
      await S3.send(
        new AbortMultipartUploadCommand({
          Bucket: BUCKET,
          Key: fileName,
          UploadId: uploadId,
        }),
      );
    }
    throw err;
  } finally {
    // Release the file handle on every path (success, upload failure, abort
    // failure); the original only closed the stream on the success path.
    readStream?.destroy();
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment