5

I am creating a CSV file on the fly from JSON content and uploading the generated CSV file to an S3 bucket, rather than first saving the file locally.

Below is my code snippet. Using the code below, my CSV file is uploaded to the S3 bucket, but it does not seem to be in the correct CSV format.

// Uploads already-generated CSV text to an S3 bucket directly from memory
// (no intermediate local file). Calls next(err) on failure, next(null, filePath)
// on success.
//
// Fixes vs. the original:
//  - missing comma after the `Bucket` property (syntax error);
//  - `new Buffer.from(...)` -> `Buffer.from(...)` (Buffer.from is a factory
//    function, not a constructor);
//  - removed JSON.stringify(): stringifying CSV text wraps it in quotes and
//    escapes the newlines, which is exactly why the uploaded file was not
//    valid CSV — the CSV string is uploaded as-is instead;
//  - added ContentType so S3/browsers treat the object as CSV.
var uploadCSVFileOnS3Bucket = function(next, csvFileContent, results) {
    console.log("uploadCSVFileOnS3Bucket function started");
    // csvFileContent is already a CSV string; do NOT JSON.stringify it.
    var bufferObject = Buffer.from(csvFileContent);
    var filePath = configurationHolder.config.s3UploadFilePath;
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'bucket_name',
        Key: 's3UploadFilePath',
        Body: bufferObject,
        ContentType: 'text/csv',
        CacheControl: 'public, max-age=86400'
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error at uploadCSVFileOnS3Bucket function", err);
            next(err);
        } else {
            console.log("File uploaded Successfully");
            next(null, filePath);
        }
    });
};

Also, I am using "json2csv" npm module for generating csv file content from JSON.

Below is the code:

// Converts an array of JSON records to CSV text via the json2csv module and
// hands the result to the `next` callback as next(null, csvString).
//
// Fixes vs. the original:
//  - the `........` placeholder inside the fields array was invalid
//    JavaScript; replaced with a comment placeholder;
//  - the empty-input branch now yields an empty string (a valid, empty CSV
//    document) instead of an array, so downstream upload code always
//    receives a string body.
var generateCSVFile = function(next, callback, csvFileContent) {
    console.log("generateCSVFile function started", csvFileContent);
    if (csvFileContent && csvFileContent.length > 0) {
        // Columns to emit, in order. Extend with the remaining field names.
        var fields = ['field1', 'field2', 'field3' /* ...remaining fields... */];
        var csv = json2csv({ data: csvFileContent, fields: fields });
        console.log('created', csv);
        next(null, csv);
    } else {
        // No records: produce an empty CSV string rather than an array.
        next(null, '');
    }
};

Please let me know where the above code is going wrong.

4 Answers 4

7

Hi, I tried again with the header values below and it worked for me. Here is the code:

// Upload the generated CSV text to S3 with headers that let the object be
// viewed inline (e.g. in a browser) and cached by a CDN for one day.
var s3Client = new AWS.S3();
var uploadParams = {
    Bucket: bucketName,
    Key: filePath,
    Body: csvFileContent,
    // Generic binary type plus an inline Content-Disposition header.
    ContentType: 'application/octet-stream',
    ContentDisposition: contentDisposition(filePath, {
        type: 'inline'
    }),
    CacheControl: 'public, max-age=86400'
};
s3Client.putObject(uploadParams, function(err, data) {
    if (err) {
        console.log("Error at uploadCSVFileOnS3Bucket function", err);
        next(err);
        return;
    }
    console.log("File uploaded Successfully");
    next(null, filePath);
});
Sign up to request clarification or add additional context in comments.

3 Comments

And where is this contentDisposition function defined?
CacheControl - does this option clear cache before uploading the file?
@ArjunSingh No, actually this CacheControl sets a cache time to the CDN when we try to access S3 object from its linked CDN URL. To remove cache, we manually need to remove the CDN cache else it will only point to S3 object once its cache time expires.
2

add ContentDisposition: 'attachment' in your params as well.

otherwise you can also read file and upload to s3

// Read a local file and upload its raw bytes to S3, forcing a download
// (attachment) when the object is later fetched.
//
// Fixes vs. the original:
//  - `Bucket:  //bucketname,` was a syntax error (the line comment swallowed
//    the property value); a real identifier with a trailing comment is used;
//  - the fs.readFile error was ignored — it is now forwarded to the callback
//    before any upload is attempted.
fs.readFile(FILEPATH, function(err, file_buffer) {
    if (err) {
        // Could not read the local file; do not attempt the upload.
        callback(err, null);
        return;
    }
    var params = {
        Bucket: bucketName, // pass your bucket name
        Key: key,
        ContentDisposition: 'attachment', // force download when fetched
        Body: file_buffer
    };
    s3.upload(params, function(err, data) {
        if (err) {
            console.log("Error in upload");
            callback(err, null);
        }
        if (data) {
            console.log("Upload Success", data);
            callback(null, data);
        }
    });
});

2 Comments

I tried using the above header but it didn't give me the correct result.
Hi I tried again with the above header and it works, I added the whole code in a separate comment. Thanks
2

Using async/await:

import { parse } from "json2csv";

// Build a CSV string from JSON records and upload it to S3 using
// async/await — no Buffer or JSON.stringify() round-trip is needed.
const saveCsv = async () => {
  // Sample JSON records to export.
  const records = [{ a: 1, b: 2 }];

  // json2csv's parse() returns the finished CSV text directly.
  const csvText = parse(records, { header: true, defaultValue: "-----" });

  const bucketName = 'bucket-name';
  const s3Key = 'filename.csv';

  await s3.put(bucketName, s3Key, csvText);
};

Just like that — without creating a Buffer or using JSON.stringify().

Comments

0

Try this, it worked for me:

// Read a local CSV file and upload it to S3.
//
// Fix vs. the original: `require('file-system')` pulled in a third-party
// npm package; Node's built-in filesystem module is `fs`.
var fs = require('fs');
var AWS = require('aws-sdk');

// NOTE(security): prefer environment variables or an IAM role over
// hard-coding credentials in source files.
AWS.config.update({
    region: '', // use appropriate region
    accessKeyId: '', // use your access key
    secretAccessKey: '' // use your secret key
});

var s3 = new AWS.S3();

fs.readFile('contacts.csv', 'utf-8', (err, data) => {
    if (err) throw err;
    const params = {
      Bucket: 'testBucket', // pass your bucket name
      Key: 'contacts.csv', // file will be saved as testBucket/contacts.csv
      Body: data
    };

    s3.upload(params, (s3Err, data) => {
      if (s3Err) throw s3Err;
      console.log(`File uploaded successfully at ${data.Location}`);
    });
});

Comments

Your Answer

By clicking “Post Your Answer”, you agree to our terms of service and acknowledge you have read our privacy policy.

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.