I noticed an unexpected result while working with big arrays in JavaScript. In my test I create a big array of class instances in a loop, and inside the loop I assign the same string to the "x" property of the second half of the objects; after the array is initialized I run a calculation over it. If the constructor initializes the "x" property with null, the array is always processed faster, even though the constructor executes an extra statement. Why is this happening? Here is my test code; I ran it in Chrome.
function f1() {
    var P = function () {
        this.value = 1;
    };
    var big_array = new Array(10000000).fill(1).map((x, index) => {
        p = new P();
        if (index > 5000000) {
            p.x = "some_string";
        }
        return p;
    });
    big_array.reduce((sum, p) => sum + p.value, 0);
}
function f2() {
    var P = function () {
        this.value = 1;
        this.x = null;
    };
    var big_array = new Array(10000000).fill(1).map((x, index) => {
        p = new P();
        if (index > 5000000) {
            p.x = "some_string";
        }
        return p;
    });
    big_array.reduce((sum, p) => sum + p.value, 0);
}
(function perform() {
    var start = performance.now();
    f1();
    var duration = performance.now() - start;
    console.log('duration of f1 ' + duration);
    start = performance.now();
    f2();
    duration = performance.now() - start;
    console.log('duration of f2 ' + duration);
})();
Output:
duration of f1 14099.85
duration of f2 11694.175000000001
Answer: You have one extra statement inside of the P function, which probably slows down the overall performance. Running the same code on my computer gives me ~2233 for f1 and ~5913 for f2. Also, p should be a local variable: without a var declaration, p = new P() creates an accidental global.
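For illustration, here is a minimal sketch of that fix, assuming the same identifiers as the question's code; declaring p with var keeps it local to the map callback instead of leaking into the global scope:

    var big_array = new Array(10000000).fill(1).map((x, index) => {
        var p = new P(); // local to the callback, no accidental global
        if (index > 5000000) {
            p.x = "some_string";
        }
        return p;
    });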