Why is my image uploaded to S3 not loading correctly?

I upload an image file in the following format:

// Gzip-compress the file as it is streamed from disk.
// NOTE(review): the Body stream is gzip'd here, but the params below never
// set ContentEncoding: 'gzip' — so clients receive compressed bytes without
// knowing to decompress them, which is why the image renders broken.
var body = fs.createReadStream(tempPath).pipe(zlib.createGzip());
// S3 client pre-bound to the destination bucket/key.
var s3obj = new AWS.S3({params: {Bucket: myBucket, Key: myKey}});
var params = {
  Body: body,
  ACL: 'public-read',       // object is world-readable
  ContentType: 'image/png'
};

// Managed (multipart-capable) upload; callback receives (err, data).
s3obj.upload(params, function(err, data) {
  // NOTE(review): there is no early return after logging the error, so the
  // next line reads data.Location even when data is undefined on failure.
  if (err) console.log("An error occurred with S3 fig upload: ", err);
  console.log("Uploaded the image file at: ", data.Location);
});

The image uploads to my S3 bucket successfully (there are no error messages and I see it in the S3 console), but when I try to display it on my website, it shows a broken-image icon. When I download the image from the S3 console, I cannot open it — I get an error saying the file is “damaged or corrupted”.

If I upload the file manually using the S3 console, I can correctly display it on my website, so I'm sure that something is wrong with the way I upload.

What is going wrong?

+4
source share
2 answers

The problem is that your body is gzip'd (`var body = ... zlib.createGzip()`), but you never tell S3 that, so clients receive compressed bytes they don't know to decompress. Add `ContentEncoding` to your params:

// Upload parameters. Declaring ContentEncoding tells S3 to serve the
// object with a "Content-Encoding: gzip" header, so browsers transparently
// decompress the body before rendering it.
var params = {
  ContentEncoding: 'gzip',
  ContentType: 'image/png',
  ACL: 'public-read',
  Body: body
};
+6

Alternatively, there's a Node module called s3-upload-stream that streams uploads to S3 (with multipart support). For example:

// Stream a local file through gzip compression and up to S3 using the
// s3-upload-stream module (multipart, backpressure-aware).
var AWS  = require('aws-sdk'),
    zlib = require('zlib'),
    fs   = require('fs');

// Set the client credentials BEFORE constructing the S3 client.
// (The original snippet terminated the declaration list with ';' one line
// early, so s3Stream leaked as an implicit global joined to the config call
// by a comma operator — and the config was loaded only after the client
// had already been created.)
AWS.config.loadFromPath('./config.json');
// or do AWS.config.update({accessKeyId: 'akid', secretAccessKey: 'secret'});

var s3Stream = require('s3-upload-stream')(new AWS.S3());

// Create the streams
var read = fs.createReadStream('/path/to/a/file');
var compress = zlib.createGzip();
var upload = s3Stream.upload({
  "Bucket": "bucket-name",
  "Key": "key-name",
  // The body is gzip-compressed, so advertise that to downstream clients;
  // without it, downloads are served as raw gzip bytes.
  "ContentEncoding": "gzip"
});

// Optional configuration
upload.maxPartSize(20971520); // 20 MB
upload.concurrentParts(5);

// Handle errors.
upload.on('error', function (error) {
  console.log(error);
});

/* Handle progress. Example details object:
   { ETag: '"f9ef956c83756a80ad62f54ae5e7d34b"',
     PartNumber: 5,
     receivedSize: 29671068,
     uploadedSize: 29671068 }
*/
upload.on('part', function (details) {
  console.log(details);
});

/* Handle upload completion. Example details object:
   { Location: 'https://bucketName.s3.amazonaws.com/filename.ext',
     Bucket: 'bucketName',
     Key: 'filename.ext',
     ETag: '"bf2acbedf84207d696c8da7dbb205b9f-5"' }
*/
upload.on('uploaded', function (details) {
  console.log(details);
});

// Pipe the incoming filestream through compression, and up to S3.
read.pipe(compress).pipe(upload);
0

Source: https://habr.com/ru/post/1616452/


All Articles