Upload entire directory tree to S3 using the AWS SDK in Node.js

I currently upload single objects to S3 like so:

var options = {
        Bucket: bucket,
        Key: s3Path,
        Body: body,
        ACL: s3FilePermissions
};

S3.putObject(options,
function (err, data) {
    //console.log(data);
});

But when I have a large assets folder, for example, I fall back to the AWS CLI tool. I was wondering: is there a native way to do the same thing with the AWS SDK (upload an entire folder to S3)?


An old-school recursive approach I whipped up in a hurry. It uses only core Node modules and the standard AWS SDK.

var AWS = require('aws-sdk');
var path = require("path");
var fs = require('fs');

const uploadDir = function(s3Path, bucketName) {

    let s3 = new AWS.S3();

    function walkSync(currentDirPath, callback) {
        fs.readdirSync(currentDirPath).forEach(function (name) {
            var filePath = path.join(currentDirPath, name);
            var stat = fs.statSync(filePath);
            if (stat.isFile()) {
                callback(filePath, stat);
            } else if (stat.isDirectory()) {
                walkSync(filePath, callback);
            }
        });
    }

    walkSync(s3Path, function(filePath, stat) {
        let bucketPath = filePath.substring(s3Path.length+1);
        let params = {Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath) };
        s3.putObject(params, function(err, data) {
            if (err) {
                console.log(err)
            } else {
                console.log('Successfully uploaded '+ bucketPath +' to ' + bucketName);
            }
        });

    });
};

uploadDir("path to your folder","your bucket name");

Special thanks to Ali from this post for the help with getting the file names.
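
As a follow-up, the same walk is easy to adapt to the v2 SDK's promise interface so the caller knows when every file has finished (or which one failed). This is only a minimal sketch, not the answer above verbatim; the function name uploadDirPromise and the key handling via path.relative are my own additions:

var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');

// Sketch: same recursive walk, but every putObject is collected as a
// promise so the whole directory upload can be awaited in one place.
function uploadDirPromise(localDir, bucketName) {
    var s3 = new AWS.S3();
    var uploads = [];

    function walkSync(currentDirPath) {
        fs.readdirSync(currentDirPath).forEach(function (name) {
            var filePath = path.join(currentDirPath, name);
            var stat = fs.statSync(filePath);
            if (stat.isFile()) {
                // build the key relative to the root folder, always with '/'
                var key = path.relative(localDir, filePath).split(path.sep).join('/');
                uploads.push(s3.putObject({
                    Bucket: bucketName,
                    Key: key,
                    Body: fs.readFileSync(filePath)
                }).promise());
            } else if (stat.isDirectory()) {
                walkSync(filePath);
            }
        });
    }

    walkSync(localDir);
    return Promise.all(uploads);
}

// usage (placeholder arguments, as in the answer above):
// uploadDirPromise("path to your folder", "your bucket name")
//     .then(function () { console.log('all files uploaded'); })
//     .catch(console.error);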


I was thinking about this problem just the other day, and came up with something like this:

...    
var async = require('async'),
    fs = require('fs'),
    path = require("path");

var directoryName = './test',
    directoryPath = path.resolve(directoryName);

var files = fs.readdirSync(directoryPath);
async.map(files, function (f, cb) {
    var filePath = path.join(directoryPath, f);

    var options = {
        Bucket: bucket,
        Key: path.posix.join(s3Path, f), // one key per file under the s3Path prefix
        Body: fs.readFileSync(filePath),
        ACL: s3FilePermissions
    };

    S3.putObject(options, cb);

}, function (err, results) {
    if (err) console.error(err);
    console.log(results);
});
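
Two caveats with this snippet: it only looks at the top level of the directory (no recursion), and async.map starts every upload at once. If the folder is large, async.mapLimit from the same async package caps how many requests are in flight. A minimal sketch, reusing the bucket, s3Path, s3FilePermissions and S3 variables that the snippet above already assumes to exist:

var async = require('async'),
    fs = require('fs'),
    path = require('path');

// bucket, s3Path, s3FilePermissions and S3 are assumed to be defined,
// exactly as in the snippet above.
var directoryPath = path.resolve('./test');
var files = fs.readdirSync(directoryPath);

// at most 10 putObject calls in flight at any time
async.mapLimit(files, 10, function (f, cb) {
    S3.putObject({
        Bucket: bucket,
        Key: path.posix.join(s3Path, f),
        Body: fs.readFileSync(path.join(directoryPath, f)),
        ACL: s3FilePermissions
    }, cb);
}, function (err, results) {
    if (err) console.error(err);
    console.log(results);
});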


Here is a cleaned-up, debugged, working version of @Jim's solution:

// requires and the S3 client that this answer assumes to be in scope
// (`config` comes from the surrounding project)
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');
const s3 = new AWS.S3();

function uploadArtifactsToS3() {
  const artifactFolder = `logs/${config.log}/test-results`;
  const testResultsPath = './test-results';

  const walkSync = (currentDirPath, callback) => {
    fs.readdirSync(currentDirPath).forEach((name) => {
      const filePath = path.join(currentDirPath, name);
      const stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walkSync(filePath, callback);
      }
    });
  };

  walkSync(testResultsPath, async (filePath) => {
    let bucketPath = filePath.substring(testResultsPath.length - 1);
    let params = {
      Bucket: process.env.SOURCE_BUCKET,
      Key: `${artifactFolder}/${bucketPath}`,
      Body: fs.readFileSync(filePath)
    };
    try {
      await s3.putObject(params).promise();
      console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
    } catch (error) {
      console.error(`error in uploading ${bucketPath} to s3 bucket`);
      throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
    }
  });
}
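
One note on the version above: fs.readFileSync buffers each artifact fully in memory and putObject sends a single PUT per object. If the test results can be large, the v2 SDK's upload() call accepts a stream and switches to multipart automatically. A small sketch of just that substitution (the helper name uploadArtifact is mine):

const AWS = require('aws-sdk');
const fs = require('fs');

const s3 = new AWS.S3();

// Sketch: stream the file instead of buffering it; upload() picks
// single PUT or multipart based on the body size.
async function uploadArtifact(bucket, key, filePath) {
  return s3.upload({
    Bucket: bucket,
    Key: key,
    Body: fs.createReadStream(filePath)
  }).promise();
}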

You could try node-s3-client.

UPDATE: It is available on npm.

From its "sync a directory to S3" docs:

UPDATE: Added client initialization code.

var s3 = require('s3'); // the node-s3-client package

var client = s3.createClient({
    maxAsyncS3: 20,     // this is the default
    s3RetryCount: 3,    // this is the default
    s3RetryDelay: 1000, // this is the default
    multipartUploadThreshold: 20971520, // this is the default (20 MB)
    multipartUploadSize: 15728640, // this is the default (15 MB)
    s3Options: {
      accessKeyId:"YOUR ACCESS KEY",
      secretAccessKey:"YOUR SECRET ACCESS KEY"
    }
  });

var params = {
  localDir:"some/local/dir",
  deleteRemoved: true, // default false, whether to remove s3 objects
                       // that have no corresponding local file.

  s3Params: {
    Bucket:"s3 bucket name",
    Prefix:"some/remote/dir/",
    // other options supported by putObject, except Body and ContentLength.
    // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
  },
};
var uploader = client.uploadDir(params);
uploader.on('error', function(err) {
  console.error("unable to sync:", err.stack);
});
uploader.on('progress', function() {
  console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
  console.log("done uploading");
});
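
If the caller needs to know when the sync has finished (for example in a build script), the 'error' and 'end' events above wrap neatly in a Promise. A small sketch using the same client and params objects:

function uploadDirAsPromise(client, params) {
  return new Promise(function (resolve, reject) {
    var uploader = client.uploadDir(params);
    uploader.on('error', reject);
    uploader.on('end', resolve);
  });
}

// usage:
// uploadDirAsPromise(client, params)
//   .then(function () { console.log('done uploading'); })
//   .catch(function (err) { console.error('unable to sync:', err.stack); });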