I've understood how to compute the time and space complexity of algorithms, and I found an interesting problem for training.
// count the occurrence of an element in an array
// list is already sorted
function count (list, item, start, end) {
if (start > end) {
return 0;
}
const mid = Math.floor((start + end) / 2);
if (list[mid] < item) {
return count(list, item, mid + 1, end);
}
if (list[mid] > item) {
return count(list, item, start, mid - 1);
}
return count(list, item, start, mid - 1) + 1 + count(list, item, mid + 1, end);
}
The answer is O(n). I thought it should be something with a logarithm. Can somebody explain to me, why I'm wrong?