Script to sync S3 objects to GCS in realtime
//This script copies an S3 object to GCS asynchronously using a Lambda function
//Set var "gcp_proj_id" to your GCP project ID
//Set var "gcp_client_email" to the client email address from the JSON key file, and make sure that user has GCS create-object permission
//Set var "cred_bucket" to the S3 bucket from which Lambda will fetch the JSON credentials file for GCP auth
//Set var "cred_s3_obj" to the name of the JSON key file uploaded to the "cred_bucket" S3 bucket
//**NOTE**: set the HOME env var to "/tmp" in the Lambda function, because @google-cloud/storage writes files locally and only /tmp is writable in Lambda. Trigger the Lambda function on S3 create-object events.
//
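//For reference, the JSON key fetched from S3 is the standard GCP service-account key
//(created via "Create key" on the service account in the GCP console); the fields this
//script relies on look like:
//  {"project_id": "...", "client_email": "...", "private_key": "-----BEGIN PRIVATE KEY-----\n..."}
//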
'use strict';
const AWS = require('aws-sdk');
//v2+ of @google-cloud/storage exports Storage as a named export
const { Storage } = require('@google-cloud/storage');

exports.handler = (event, context, callback) => {
  console.log('Copy started');
  //read the source bucket and key from the triggering S3 event
  var srcBucket = event.Records[0].s3.bucket.name;
  var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
  var gcp_proj_id = '<GCP project ID as a string>';
  var gcp_client_email = '<GCP client email ID from JSON key as a string>';
  var cred_bucket = '<S3 bucket where you have uploaded JSON key file>';
  var cred_s3_obj = '<JSON key file name>';
  //create the AWS S3 client
  var s3 = new AWS.S3({region: '<AWS region>'});
  //fetch the GCP JSON key file from S3
  s3.getObject({Bucket: cred_bucket, Key: cred_s3_obj}, function(err, data) {
    if (err) return callback(err);
    //JSON.parse turns the escaped "\n" sequences in the key file into real newlines,
    //so the private key can be passed to the GCS client as-is
    var key = JSON.parse(data.Body.toString());
    //create the GCS client with the fetched credentials
    const storage = new Storage({
      projectId: gcp_proj_id,
      credentials: {client_email: gcp_client_email, private_key: key.private_key}
    });
    //get the newly created object from S3
    s3.getObject({Bucket: srcBucket, Key: srcKey}, function(err, data) {
      if (err) return callback(err);
      const bucket = storage.bucket(srcBucket);
      const file1 = bucket.file(srcKey);
      //preserve the source object's content type on the GCS copy
      const options = {
        metadata: {
          contentType: data.ContentType
        }
      };
      //upload the downloaded object body to GCS
      file1.save(data.Body, options, function(err) {
        if (err) return callback(err);
        console.log('File written successfully.');
        callback(null, 'Copy finished');
      });
    });
  });
};
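For a quick local smoke test, the handler can be invoked directly with a stub event. A minimal sketch, assuming the script above is saved as index.js and the placeholder vars are filled in; the bucket and object key names here are hypothetical:

process.env.HOME = '/tmp'; //mirrors the HOME=/tmp requirement from the NOTE above
const { handler } = require('./index');
const fakeEvent = {
  Records: [
    {s3: {bucket: {name: 'my-src-bucket'}, object: {key: 'path/to/file.png'}}}
  ]
};
handler(fakeEvent, {}, (err, res) => console.log(err || res));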