Each item insertion consumes at least 1 WCU, and 1 WCU per KB of item size (rounded up to the next whole KB); there is no way around that cost.
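As a rough sketch of that arithmetic (the item sizes below are just made-up examples):

// WCU cost of a standard write: item size rounded up to the next whole KB.
const wcuForWrite = itemSizeBytes => Math.ceil(itemSizeBytes / 1024);

wcuForWrite(512);  // 1 WCU (anything up to 1 KB costs 1)
wcuForWrite(2500); // 3 WCU (~2.44 KB rounds up to 3)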
Instead of individual inserts you could use batchWrite, which is more efficient because it needs fewer round trips, but it does not change the WCU cost: you still need to adjust your insertion rate to the table's write throughput.
Also, be aware that your first inserts are served from burst capacity (DynamoDB keeps up to 300 seconds of unused write capacity, so roughly your first 300 writes on a 1 WCU table); once that is consumed, you can only sustain 1 write/s with 1 WCU.
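As a back-of-the-envelope calculation (the provisioned capacity and item count here are assumptions, not taken from your setup):

// Assuming a table provisioned with 1 WCU and items of at most 1 KB:
const provisionedWcu = 1;                  // hypothetical table setting
const itemCount = 10000;                   // hypothetical workload
const burstWrites = 300 * provisionedWcu;  // ~300 s of accumulated burst capacity
const seconds = Math.max(0, itemCount - burstWrites) / provisionedWcu;
// => 9700 s (~2.7 hours) of sustained writes after the burst is used up.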
Here is an example of batched inserting that waits between chunks and reschedules failed (unprocessed) writes:
// AWS SDK v2; documentClient is assumed to be configured with your region/credentials.
const AWS = require('aws-sdk');
const documentClient = new AWS.DynamoDB.DocumentClient();

// Splits `list` into chunks of `chunkSize`, optionally waits between chunks,
// and lets the callback re-queue failed writes by pushing new chunks.
async function batchedAsync({list, callback, chunkSize = 10, msDelayBetweenChunks = 0}) {
  const emptyList = new Array(Math.ceil(list.length / chunkSize)).fill();
  const clonedList = list.slice(0);
  const chunks = emptyList.map(() => clonedList.splice(0, chunkSize));
  for (const chunk of chunks) {
    if (msDelayBetweenChunks) {
      await new Promise(resolve => setTimeout(resolve, msDelayBetweenChunks));
    }
    await callback(chunk, chunks);
  }
}

// Writes one chunk with batchWrite and re-queues throttled (unprocessed) items.
async function writeItems(tableName, chunk, chunks) {
  const {UnprocessedItems} = await documentClient.batchWrite({
    RequestItems: {
      [tableName]: chunk.map(item => ({PutRequest: {Item: item}}))
    }
  }).promise();
  // UnprocessedItems is keyed by table name and contains full PutRequest objects,
  // so unwrap the items before pushing them back as a new chunk.
  if (UnprocessedItems[tableName] && UnprocessedItems[tableName].length) {
    chunks.push(UnprocessedItems[tableName].map(request => request.PutRequest.Item));
  }
}
// Example
batchedAsync({
  list: itemsToInsert,
  callback: writeItems.bind(null, myTableName),
  chunkSize: 2, // adjust to your provisioned throughput; max 25 (DynamoDB batchWrite limit)
  msDelayBetweenChunks: 1000
});
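With chunkSize: 2 and msDelayBetweenChunks: 1000 the script writes roughly 2 items per second, which matches a table provisioned with 2 WCU for items up to 1 KB; raise the chunk size (up to 25) and/or shrink the delay to match whatever throughput your table actually has.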