I'm working on an AWS Lambda function that takes data from two MongoDB databases, finds the matching records between two collections by 'id', and calculates each record's commission at a fixed rate. It then exports the data to an xls file and uploads it to S3.
connect.js
const MongoClient = require('mongodb').MongoClient;
/**
 * Fetch every document from a collection.
 *
 * @param {string} dbString - MongoDB connection string.
 * @param {string} dbName - Database name.
 * @param {string} collectionName - Collection to read.
 * @returns {Promise<Object[]|undefined>} All documents, or undefined if the
 *   connection or query fails (errors are logged, not rethrown, to preserve
 *   the original best-effort contract).
 */
const getAllDocument = async (dbString, dbName, collectionName) => {
  console.log('Initializing connection');
  let client;
  try {
    client = await MongoClient.connect(dbString, { useUnifiedTopology: true });
  } catch (err) {
    console.error(err);
    return;
  }
  try {
    return await client.db(dbName).collection(collectionName).find().toArray();
  } catch (err) {
    console.error(err);
  } finally {
    // Await so the connection is fully released before the function resolves;
    // fire-and-forget close() can leak sockets in a Lambda environment.
    await client.close();
  }
};
/**
 * Fetch a single document whose `name` field matches `property`.
 *
 * @param {string} dbString - MongoDB connection string.
 * @param {string} dbName - Database name.
 * @param {string} collectionName - Collection to query.
 * @param {string} property - Value matched against the document's `name` field.
 * @returns {Promise<Object|null|undefined>} The matching document, null if no
 *   match, or undefined on connection/query failure (errors are logged, not
 *   rethrown, to preserve the original best-effort contract).
 */
const getDocument = async (dbString, dbName, collectionName, property) => {
  let client;
  try {
    client = await MongoClient.connect(dbString, { useUnifiedTopology: true });
  } catch (err) {
    console.error(err);
    return;
  }
  try {
    return await client.db(dbName).collection(collectionName).findOne({ name: property });
  } catch (err) {
    console.error(err);
  } finally {
    // Await so the connection is fully released before the function resolves.
    await client.close();
  }
};
// Public API of this module.
module.exports = { getAllDocument, getDocument };
lambda.js
const XLSX = require('xlsx');
const fs = require('fs');
const AWS = require('aws-sdk');
const getAllDocument = require("./connect.js").getAllDocument;
const getDocument = require("./connect.js").getDocument;
// Configuration comes from environment variables (set via the Lambda
// console or IaC). All are plain reads; none are validated here.
const bucketName = process.env.S3_BUCKET;
const db_host_seller = process.env.DB_HOST_SELLER;
const db_name_seller = process.env.DB_NAME_SELLER;
const db_host_retail = process.env.DB_HOST_RETAIL;
const db_name_retail = process.env.DB_NAME_RETAIL;
const sellerName = process.env.SELLER;
const retailerName = process.env.RETAILER;
// Create the S3 client once at module scope so it is reused across warm
// invocations; `const` replaces the implicit global the original created.
const s3 = new AWS.S3();
// Module-scope state read by the handler after loadData() resolves.
// Declared explicitly (the original created implicit globals).
let retailData;
let sellerData;
let seller;

/**
 * Load both collections and the seller document into module-scope state.
 * The three queries are independent, so they run in parallel via
 * Promise.all instead of three serial awaits.
 */
const loadData = async () => {
  [retailData, sellerData, seller] = await Promise.all([
    getAllDocument(db_host_retail, db_name_retail, "retailerCollection"),
    getAllDocument(db_host_seller, db_name_seller, "sellerCollection"),
    getDocument(db_host_seller, db_name_seller, "sellerCollection", sellerName),
  ]);
};
exports.handler = (event) => {
// TODO implement
loadData().then(() => {
// Find retailer's rate
let rate = seller.retailers.find(obj => {
return obj.name === retailerName;
}).rate;
// Match records between seller and retailer data
let matchedItem = [];
for (var i in sellerData) {
// Convert date format
sellerData[i]["Connection Date"] = sellerData[i]["Connection Date"].toDateString()
delete sellerData[i]['_id'];
if (sellerData[i]["Disconnection Date"] !== "") {
sellerData[i]["Disconnection Date"] = sellerData[i]["Disconnection Date"].toDateString();
}
for( var j in retailData){
if (sellerData[i]["id"] == retailData[j]["id"]) {
sellerData[i]['Commission'] = rate * sellerData[i]['TotalPayment'] ;
matchedItem.push(sellerData[i]);
}
}
}
// Export to xls file
let worksheet = XLSX.utils.json_to_sheet(matchedItem);
let workbook = XLSX.utils.book_new();
XLSX.utils.book_append_sheet(workbook, worksheet, 'Commission')
XLSX.writeFile(workbook, '/tmp/Commission.xls');
const file = fs.readFileSync('/tmp/Commission.xls');
const params = {
Bucket: bucketName,
Key: 'Commission.xls', // File name to save in S3
Body: file
};
// Uploading files to the bucket
s3.upload(params, function(err, data) {
if (err) {
throw err;
}
console.log(`File uploaded to: ` + data.Location);
});
});
};
I would like to find out if there is any way for me to improve the code in terms of readability and performance.