Created
May 14, 2018 13:14
-
-
Save shirish47/43a3d60cb0203d62681d6c280ba16b93 to your computer and use it in GitHub Desktop.
Lambda that reads data files from an S3 bucket, merges the array data from multiple files into one CSV file, and stores the result back to S3
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/*
This was part of a project where we were storing data from AWS IoT
and then triggering an AWS Lambda to read the data files and merge them
into one single CSV file. So this might be useful later on.
*/
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var jsonexport = require('jsonexport');
var fs = require('fs'); // NOTE(review): currently unused — safe to drop if nothing else is added
exports.handler = (event, context, callback) => { | |
// TODO implement | |
console.log("event is :"+JSON.stringify(event)); | |
var src_bkt = event.Records[0].s3.bucket.name; | |
var src_key = event.Records[0].s3.object.key; | |
//console.log("bucket :"+JSON.stringify(src_bkt)); | |
//console.log("key: "+JSON.stringify(src_key)); | |
var fetchFrom; | |
var ouputFilePath; | |
var s3array =[]; | |
var data_x=[]; | |
var data_y=[]; | |
var data_z=[]; | |
var final_xyz=[]; | |
// Retrieve the object | |
s3.getObject({ | |
Bucket: src_bkt, | |
Key: src_key | |
}, function(err, data) { | |
if (err) { | |
console.log(err, err.stack); | |
callback(err); | |
} else { | |
fetchFrom = "Data/"+JSON.parse(data.Body).DeviceId +"/"+ JSON.parse(data.Body).connectionstamp +"/"; | |
ouputFilePath= "MergedCSV/"+JSON.parse(data.Body).DeviceId +"/"+ JSON.parse(data.Body).connectionstamp +"/"; | |
for(var i=0;i<9600;i+=128) | |
{ | |
var file = fetchFrom+`${i}`+".json"; | |
//console.log("file key:"+ file); | |
s3array.push(s3.getObject( | |
{ | |
Bucket: src_bkt, | |
Key: file | |
}).promise() | |
) | |
}//end of loop 1 | |
console.log("S3 Array: "+ s3array); | |
Promise.all(s3array).then( | |
function (data){ | |
data.forEach(function(file){ | |
var fileJSON = JSON.parse(file.Body); | |
console.log("File Data:"+ file.Body.toString('ascii')); | |
data_x.push.apply(data_x,fileJSON.x); | |
data_y.push.apply(data_y,fileJSON.y); | |
data_z.push.apply(data_z,fileJSON.z); | |
}); | |
var len=data_x.length; | |
for(i=0;i<len;i++){ | |
var temp={"t":i*0.8,"x":data_x[i],"y":data_y[i],"z":data_z[i]}; | |
final_xyz.push(temp); | |
} | |
jsonexport(final_xyz,function(err, csv){ | |
if(err) return console.log(err); | |
var params = { | |
Bucket : src_bkt, | |
Key : ouputFilePath+"out.csv", | |
Body : csv | |
} | |
s3.putObject(params, function(err, data) { | |
if (err) console.log(err, err.stack); // an error occurred | |
else console.log(data); // successful response | |
}); | |
}); | |
} | |
) | |
.catch(function(err){ | |
console.log("catch err: "+ err); | |
} | |
); | |
} | |
}); | |
// console.log("Hi shirish here Bucket: "+event.bucket); | |
// console.log("& S3 is: "+event.bucket); | |
}; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment