Upload a large file to AWS S3 using multipart uploads
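The script expects a .env file in the same directory, loaded via dotenv. A minimal sketch of that file, with placeholder values (the variable names are the ones the code below reads):

AWS_ACCESS_KEY_ID=your-access-key-id
AWS_SECRET_ACCESS_KEY=your-secret-access-key
AWS_DEFAULT_REGION=us-east-1
BUCKET=your-bucket-name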
// npm i --save dotenv aws-sdk
const dotenv = require('dotenv')
const path = require('path')
const fs = require('fs')
const AWS = require('aws-sdk');

// Load AWS credentials and the bucket name from a .env file next to this script.
dotenv.config({
  path: path.join(__dirname, '.env'),
  debug: true,
})

const S3 = new AWS.S3({
  apiVersion: '2006-03-01',
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_DEFAULT_REGION,
});

(async () => {
  // Step 1: start the multipart upload and keep the UploadId,
  // which every subsequent part upload must reference.
  let multipartCreateResult = await S3.createMultipartUpload({
    Bucket: process.env.BUCKET,
    Key: "movie.mp4",
    ACL: "public-read",
    ContentType: "video/mp4",
    StorageClass: 'STANDARD'
  }).promise()
  console.log("multipartCreateResult", multipartCreateResult)

  // Step 2: read the file in 10 MB chunks, uploading each chunk as one part.
  // S3 requires every part except the last to be at least 5 MB.
  const CHUNK_SIZE = 10 * 1024 * 1024, // 10 MB
    buffer = Buffer.alloc(CHUNK_SIZE),
    filePath = 'movie.mp4';
  let chunkCount = 1;
  let uploadPartResults = []

  fs.open(filePath, 'r', function (err, fd) {
    if (err) throw err;

    function readNextChunk() {
      fs.read(fd, buffer, 0, CHUNK_SIZE, null, async function (err, nread) {
        if (err) throw err;

        if (nread === 0) {
          // Step 3: the whole file has been read, so ask S3 to assemble
          // the parts into the final object.
          console.log("uploadPartResults", uploadPartResults)
          let completeUploadResponse = await S3.completeMultipartUpload({
            Bucket: process.env.BUCKET,
            Key: "movie.mp4",
            MultipartUpload: {
              Parts: uploadPartResults
            },
            UploadId: multipartCreateResult.UploadId
          }).promise()
          console.log("completeUploadResponse", completeUploadResponse)
          fs.close(fd, function (err) {
            if (err) throw err;
          });
          return;
        }

        // The final chunk is usually shorter than CHUNK_SIZE, so only
        // send the bytes that were actually read.
        let data;
        if (nread < CHUNK_SIZE) {
          data = buffer.slice(0, nread);
        } else {
          data = buffer;
        }

        // Upload this chunk. Part numbers start at 1, and S3 allows at
        // most 10,000 parts per upload. A failure here rejects instead of
        // being silently logged, so a missing ETag can't corrupt the parts list.
        let uploadPartResult = await S3.uploadPart({
          Body: data,
          Bucket: process.env.BUCKET,
          Key: "movie.mp4",
          PartNumber: chunkCount,
          UploadId: multipartCreateResult.UploadId,
        }).promise()
        console.log(`uploadPartResult - ${chunkCount}`, uploadPartResult)

        // Record each part's ETag: completeMultipartUpload needs the full
        // (PartNumber, ETag) list to stitch the object together.
        uploadPartResults.push({
          PartNumber: chunkCount,
          ETag: uploadPartResult.ETag
        })
        chunkCount++;
        readNextChunk()
      });
    }

    readNextChunk();
  });
})()
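If a part upload fails midway, the parts already stored keep incurring charges until the multipart upload is aborted. A minimal cleanup sketch, reusing the multipartCreateResult from the script above:

// Abort the upload; S3 then discards all parts uploaded so far.
await S3.abortMultipartUpload({
  Bucket: process.env.BUCKET,
  Key: "movie.mp4",
  UploadId: multipartCreateResult.UploadId,
}).promise()

For many workloads the SDK's managed uploader can replace the manual read loop entirely: S3.upload() splits the stream into parts, uploads several in parallel, and retries transient failures. A sketch assuming the same environment variables as above:

// Stream the file through the SDK's managed multipart uploader.
let managedUploadResult = await S3.upload({
  Bucket: process.env.BUCKET,
  Key: "movie.mp4",
  Body: fs.createReadStream('movie.mp4'),
  ContentType: "video/mp4",
}, {
  partSize: 10 * 1024 * 1024, // same 10 MB parts as above
  queueSize: 4,               // upload up to 4 parts concurrently
}).promise()
console.log("uploaded to", managedUploadResult.Location)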