aws-sdk Node.js: S3 putObject from request body

I am trying to save a PDF to S3 using the AWS SDK. I receive the PDF through the body of a POST request (application/pdf).

When I save the file to the local disk with fs.writeFile, the file looks fine. But when I upload it to S3, the file is corrupted (a single blank-page PDF).

Any help or hint is welcome!

var data = body; // body from a POST request.
var fileName = "test.pdf";

fs.writeFile(fileName, data, { encoding: "binary" }, function(err, data) {
    console.log('saved'); // File is OK!
});

s3.putObject({
    Bucket: "bucketName",
    Key: fileName,
    Body: data
}, function(err, data) {
    console.log('uploaded'); // File uploads incorrectly.
});

EDIT:

It works if I write the file to disk, read it back, and then upload it.

fs.writeFile(fileName, data, { encoding: "binary" }, function(err, data) {
    fs.readFile(fileName, function(err, fileData) {
        s3.putObject({
            Bucket: "bucketName",
            Key: fileName,
            Body: fileData
        }, function(err, data) {
            console.log('uploaded'); // File uploads correctly.
        });
    });
});
4 answers

Try setting ContentType and/or ContentEncoding on your S3 put.

  ContentType: 'binary', ContentEncoding: 'utf8' 

For a working example, see the sample code in the question "putObject makes an object larger on the server in Nodejs".
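As a side note, 'binary' is not a registered MIME type; for a PDF the standard value is application/pdf, and ContentEncoding normally describes compression (e.g. 'gzip') rather than character encoding. A minimal sketch applying that to the question's code (assuming data is a Buffer holding the PDF bytes; the bucket name is the question's placeholder):

s3.putObject({
    Bucket: "bucketName",
    Key: fileName,
    Body: data,
    ContentType: "application/pdf" // the real MIME type for PDFs
}, function(err, res) {
    if (err) console.log(err);
    else console.log('uploaded');
});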


I think this is because data has already been consumed (i.e., it is a stream).

This would explain why nothing usable was sent to S3 at first, and why, after writing the data to disk and reading it back, you can send a valid PDF.

To check, try sending data directly to S3 without writing it to disk first.
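One way to test that theory is to buffer the incoming request stream completely before touching it, so the same Buffer can be reused for both the disk write and the upload. A minimal sketch, assuming req is the incoming http.IncomingMessage (the names here are illustrative, not from the question):

var chunks = [];
req.on('data', function(chunk) {
    chunks.push(chunk);
});
req.on('end', function() {
    var data = Buffer.concat(chunks); // a plain Buffer, safe to reuse
    s3.putObject({
        Bucket: "bucketName",
        Key: "test.pdf",
        Body: data
    }, function(err, res) {
        if (err) console.log(err);
        else console.log('uploaded');
    });
});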


/** JS library: Promise.promisify from bluebird */

My code is below:

global.Promise = require('bluebird');
const aws = require('aws-sdk');
const awsAccessKey = {
    accessKeyId: 'your-accesskey-id',
    secretAccessKey: 'your-secret-access-key'
};
const fs = require('fs');
const path = require('path');
const uuidV4 = require('uuid/v4');

// Create the S3 service object
// available apiVersion: '2006-03-01', '2013-04-01'
const s3 = new aws.S3(Object.assign(awsAccessKey, {
    apiVersion: '2013-04-01'
}));

function putObject(bucketName, file) {
    console.log('putObject into', bucketName);
    /**
     * If we don't use a versioned bucket, we must not pass VersionId
     */
    const params = {
        Bucket: bucketName,
        Key: '',
        Body: 'Plain text',
        ACL: 'public-read',
        ContentType: 'binary',
        CacheControl: 'max-age=172800'
    };
    return Promise
        .promisify(fs.readFile, { context: fs })(file)
        .then((fileData) => {
            console.log(fileData);
            params.Body = fileData;
            params.Key = 'g01/' + uuidV4() + '-' + path.basename(file);
            return Promise
                .promisify(s3.putObject, { context: s3 })(params)
                .then((data) => {
                    console.log('successful');
                    console.log(data);
                })
                .catch((err) => {
                    console.log('Error', err);
                });
        })
        .catch(() => {
        });
}
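A hypothetical call (the bucket name and file path are made up for illustration):

putObject('my-bucket', path.join(__dirname, 'test.pdf'))
    .then(() => console.log('done'));

Since putObject returns the promisified chain, the caller can attach further .then handlers or await it.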

Yes, you forgot about the callback of the writeFile function, so when you started uploading to Amazon S3, your file had not been saved completely yet. You must not forget that Node.js is asynchronous: the application will not wait for fs.writeFile to finish, it will just launch s3.putObject at the same time.
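Following that reasoning, here is a sketch of the question's code with the upload nested inside the writeFile callback (note that the original callback's second data parameter also shadowed the outer data variable, so it is dropped here):

fs.writeFile(fileName, data, { encoding: "binary" }, function(err) {
    if (err) return console.log(err);
    console.log('saved');
    // Only now is the file fully on disk, and `data` is not shadowed.
    s3.putObject({
        Bucket: "bucketName",
        Key: fileName,
        Body: data
    }, function(err, res) {
        if (err) console.log(err);
        else console.log('uploaded');
    });
});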

