Upload an entire directory tree to S3 using the AWS SDK in Node.js

I am currently uploading individual objects to S3 like this:

var options = { Bucket: bucket, Key: s3Path, Body: body, ACL: s3FilePermissions };
S3.putObject(options, function (err, data) {
    //console.log(data);
});

This works fine for single files, but when I have a folder full of resources I need a way to upload the whole directory tree at once.

4 answers

An old-school recursive approach I whipped up in a hurry. It uses only core Node modules and the standard AWS SDK.

var AWS = require('aws-sdk');
var path = require('path');
var fs = require('fs');

const uploadDir = function (s3Path, bucketName) {
    let s3 = new AWS.S3();

    // Recursively walk the tree, invoking the callback for every regular file.
    function walkSync(currentDirPath, callback) {
        fs.readdirSync(currentDirPath).forEach(function (name) {
            var filePath = path.join(currentDirPath, name);
            var stat = fs.statSync(filePath);
            if (stat.isFile()) {
                callback(filePath, stat);
            } else if (stat.isDirectory()) {
                walkSync(filePath, callback);
            }
        });
    }

    walkSync(s3Path, function (filePath, stat) {
        // Key each object by its path relative to the root folder.
        let bucketPath = filePath.substring(s3Path.length + 1);
        let params = { Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath) };
        s3.putObject(params, function (err, data) {
            if (err) {
                console.log(err);
            } else {
                console.log('Successfully uploaded ' + bucketPath + ' to ' + bucketName);
            }
        });
    });
};

uploadDir("path to your folder", "your bucket name");

Special thanks to Ali in this post for helping me get the file names.
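
If you are on the newer v3 SDK, here is a minimal sketch of the same recursive walk using @aws-sdk/client-s3 (this is not from the original answers; the bucket and folder names are placeholders):

// Sketch only: same recursive upload, but with the v3 SDK (@aws-sdk/client-s3).
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const fs = require('fs');
const path = require('path');

const s3 = new S3Client({});

async function uploadDirV3(rootDir, bucketName, currentDir = rootDir) {
    for (const name of fs.readdirSync(currentDir)) {
        const filePath = path.join(currentDir, name);
        if (fs.statSync(filePath).isDirectory()) {
            await uploadDirV3(rootDir, bucketName, filePath); // recurse into subfolders
        } else {
            // Build a forward-slash key relative to the root folder.
            const key = path.relative(rootDir, filePath).split(path.sep).join('/');
            await s3.send(new PutObjectCommand({
                Bucket: bucketName,
                Key: key,
                Body: fs.readFileSync(filePath),
            }));
            console.log('Successfully uploaded ' + key + ' to ' + bucketName);
        }
    }
}

uploadDirV3('path to your folder', 'your bucket name').catch(console.error);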


I was just looking at this problem the other day and was thinking of something like this:

...
var async = require('async'),
    fs = require('fs'),
    path = require('path');

var directoryName = './test',
    directoryPath = path.resolve(directoryName);

var files = fs.readdirSync(directoryPath);

async.map(files, function (f, cb) {
    var filePath = path.join(directoryPath, f);
    var options = {
        Bucket: bucket,
        // Key each object per file so the uploads don't overwrite one another.
        Key: path.posix.join(s3Path, f),
        Body: fs.readFileSync(filePath),
        ACL: s3FilePermissions
    };
    S3.putObject(options, cb);
}, function (err, results) {
    if (err) console.error(err);
    console.log(results);
});
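
Note that this maps over only the top level of the directory and does not recurse into subfolders. Here is a dependency-free sketch of the same parallel idea using promises instead of async (S3, bucket, s3Path and s3FilePermissions are carried over from the question):

// Sketch only: parallel upload of one directory level with Promise.all.
const fs = require('fs');
const path = require('path');

const directoryPath = path.resolve('./test');

Promise.all(
    fs.readdirSync(directoryPath).map(function (f) {
        const filePath = path.join(directoryPath, f);
        return S3.putObject({
            Bucket: bucket,
            Key: path.posix.join(s3Path, f),
            Body: fs.readFileSync(filePath),
            ACL: s3FilePermissions
        }).promise(); // aws-sdk v2: turn the request into a promise
    })
).then(function (results) {
    console.log(results);
}).catch(function (err) {
    console.error(err);
});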

You can try node-s3-client.

UPDATE: available on npm here

From its "sync a directory to S3" docs:

UPDATE: client initialization code added.

// node-s3-client is published on npm as the "s3" package.
var s3 = require('s3');

var client = s3.createClient({
    maxAsyncS3: 20,     // this is the default
    s3RetryCount: 3,    // this is the default
    s3RetryDelay: 1000, // this is the default
    multipartUploadThreshold: 20971520, // this is the default (20 MB)
    multipartUploadSize: 15728640,      // this is the default (15 MB)
    s3Options: {
        accessKeyId: "YOUR ACCESS KEY",
        secretAccessKey: "YOUR SECRET ACCESS KEY"
    }
});

var params = {
    localDir: "some/local/dir",
    deleteRemoved: true, // default false, whether to remove s3 objects
                         // that have no corresponding local file.
    s3Params: {
        Bucket: "s3 bucket name",
        Prefix: "some/remote/dir/",
        // other options supported by putObject, except Body and ContentLength.
        // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
    },
};

var uploader = client.uploadDir(params);
uploader.on('error', function (err) {
    console.error("unable to sync:", err.stack);
});
uploader.on('progress', function () {
    console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function () {
    console.log("done uploading");
});
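
The same client can sync in the other direction too; here is a short sketch using its downloadDir, assuming the client initialized above (paths and bucket name are placeholders):

// Sketch only: mirror an S3 prefix down to a local directory.
var downloader = client.downloadDir({
    localDir: "some/local/dir",
    s3Params: {
        Bucket: "s3 bucket name",
        Prefix: "some/remote/dir/"
    }
});
downloader.on('error', function (err) {
    console.error("unable to sync down:", err.stack);
});
downloader.on('progress', function () {
    console.log("progress", downloader.progressAmount, downloader.progressTotal);
});
downloader.on('end', function () {
    console.log("done downloading");
});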

Here is a cleaned-up, debugged, working version of @Jim's solution:

// Assumes fs, path, an AWS.S3 instance named s3, and a config object are defined elsewhere.
function uploadArtifactsToS3() {
    const artifactFolder = `logs/${config.log}/test-results`;
    const testResultsPath = './test-results';

    const walkSync = (currentDirPath, callback) => {
        fs.readdirSync(currentDirPath).forEach((name) => {
            const filePath = path.join(currentDirPath, name);
            const stat = fs.statSync(filePath);
            if (stat.isFile()) {
                callback(filePath, stat);
            } else if (stat.isDirectory()) {
                walkSync(filePath, callback);
            }
        });
    };

    walkSync(testResultsPath, async (filePath) => {
        let bucketPath = filePath.substring(testResultsPath.length - 1);
        let params = {
            Bucket: process.env.SOURCE_BUCKET,
            Key: `${artifactFolder}/${bucketPath}`,
            Body: fs.readFileSync(filePath)
        };
        try {
            await s3.putObject(params).promise();
            console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
        } catch (error) {
            console.error(`error in uploading ${bucketPath} to s3 bucket`);
            throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
        }
    });
}
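
For reference, a minimal sketch of the wiring the snippet above assumes; the config.log value and fallback bucket name here are hypothetical placeholders, not from the original answer:

// Sketch only: hypothetical setup for the snippet above.
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');

const s3 = new AWS.S3();
const config = { log: 'build-1234' }; // hypothetical value for illustration

process.env.SOURCE_BUCKET = process.env.SOURCE_BUCKET || 'my-artifact-bucket'; // hypothetical bucket

uploadArtifactsToS3();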

Source: https://habr.com/ru/post/980208/
