The best way to do this is with the .aggregate() method, which gives you access to the aggregation pipeline.
Your pipeline needs only one stage, a $project, in which the $map operator returns a new array where each element is the result of $concat; $concat simply concatenates the "ROLE_" prefix with the original string value.
You then iterate the aggregation result, which is a cursor, and update your documents using "bulk" operations for maximum efficiency.
var bulkOp = db.users.initializeOrderedBulkOp();
var count = 0;

db.users.aggregate([
    { "$project": {
        "roles": {
            "$map": {
                "input": "$roles",
                "as": "role",
                "in": { "$concat": [ "ROLE_", "$$role" ] }
            }
        }
    }}
]).forEach(function(doc) {
    bulkOp.find({ "_id": doc._id }).updateOne(
        { "$set": { "roles": doc.roles } }
    );
    count++;
    if (count % 300 === 0) {
        // Execute per 300 operations and re-init
        bulkOp.execute();
        bulkOp = db.users.initializeOrderedBulkOp();
    }
});
// Clean up the queue: only execute if there are queued operations left,
// otherwise execute() on an empty bulk throws an error
if (count % 300 !== 0)
    bulkOp.execute();
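Each document returned by the aggregation cursor contains only the _id and the rewritten "roles" array, since the $project stage drops the other fields, which is exactly what the $set modifier needs. Assuming the original roles are stored without the prefix (e.g. "ADMIN" and "USER"), a projected document looks roughly like this:

{ "_id" : "acc1", "roles" : [ "ROLE_ADMIN", "ROLE_USER" ] }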
MongoDB 3.2 deprecates Bulk() and its associated methods, so from that version onwards you should use the .bulkWrite() method instead:
var requests = [];

db.users.aggregate([
    { "$project": {
        "roles": {
            "$map": {
                "input": "$roles",
                "as": "role",
                "in": { "$concat": [ "ROLE_", "$$role" ] }
            }
        }
    }}
]).forEach(doc => {
    requests.push({
        "updateOne": {
            "filter": { "_id": doc._id },
            "update": { "$set": { "roles": doc.roles } }
        }
    });
    if (requests.length === 1000) {
        // Execute per 1000 operations
        db.users.bulkWrite(requests);
        requests = [];
    }
});

// Execute whatever is left in the queue
if (requests.length > 0)
    db.users.bulkWrite(requests);
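If you want to double-check the result from the mongo shell, a plain query returns the updated documents:

db.users.find().pretty()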
Your documents then look like this:
{
    "_id" : "acc1",
    "email" : "[email protected]",
    "password" : "password",
    "roles" : [
        "ROLE_ADMIN",
        "ROLE_USER"
    ]
}
{
    "_id" : "acc2",
    "email" : "[email protected]",
    "password" : "password",
    "roles" : [
        "ROLE_USER"
    ]
}
{
    "_id" : "acc3",
    "email" : "[email protected]",
    "password" : "password",
    "roles" : [
        "ROLE_ADMIN",
        "ROLE_SYSTEM",
        "ROLE_USER"
    ]
}