Last active
March 14, 2024 18:10
-
-
Save nerycordova/5cf0e169d330d8fbba85529d14907d31 to your computer and use it in GitHub Desktop.
Unzip large files in AWS S3 using Lambda and Node.js
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Dev.to article: https://dev.to/nerycordova/unzip-large-files-in-aws-using-lambda-and-node-js-cpp
// NOTE: trailing "| |" scrape artifacts removed — they were invalid JavaScript.
const AWS = require("aws-sdk");
// Pin the S3 API version so behavior is stable across SDK upgrades.
const s3 = new AWS.S3({ apiVersion: "2006-03-01" });
// Streaming unzip library: lets us extract one entry without buffering the whole archive.
const unzipper = require("unzipper");
exports.handler = async (event) => { | |
//...initialize bucket, filename and target_filename here | |
try { | |
/** | |
* Step 1: Get stream of the file to be extracted from the zip | |
*/ | |
const file_stream = s3 | |
.getObject({ Bucket: bucket, Key: filename }) | |
.createReadStream() | |
.on("error", (e) => console.log(`Error extracting file: `, e)) | |
.pipe( | |
unzipper.ParseOne("file_name_inside_zip.ext", { | |
forceStream: true, | |
}) | |
); | |
/** | |
* Step 2: upload extracted stream back to S3: this method supports a readable stream in the Body param as per | |
* https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property | |
*/ | |
await s3 | |
.upload({ Bucket: bucket, Key: target_filename, Body: file_stream }) | |
.promise(); | |
} catch (error) { | |
console.log("Error: ", error.message, error.stack); | |
} | |
}; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
@tdough21 I hope you were able to solve this back in May. In case you haven't — and for the record — the docs show that most of the examples use this condition
if (fileName === "this IS the file I'm looking for")
; see this one for an example. So, by design, the library allows you to get all the file names inside the .zip package.