This is because DOM changes are not rendered as long as a JavaScript function is running. User interface code is single-threaded, and the browser locks up while executing it. Normally this is not a problem because JS is quite fast and functions do not run for very long. But when they do, you see sluggish behavior as a result.
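To see the problem, here is a minimal sketch of the naive approach, using the same "selector" placeholder as below: the browser cannot repaint until the loop over every matched element has finished.
// blocking sketch: no repaint happens until the whole loop is done
jQuery("selector").each(function() {
    jQuery(this).addClass('highlighted');
});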
Your function needs to pause in the middle of its work to give the browser a chance to repaint and become responsive again. You can do this with setTimeout() and by remembering where you left off.
This should give you an idea:
// prepares a closure function that handles the affected elements in chunks of n
function updatePartial(elems, chunksize) {
    var current = 0;
    return function step() {
        // changes up to chunksize elements, guarding against running past the end
        for (var i = 0; i < chunksize && current + i < elems.length; i++) {
            jQuery(elems[current + i]).addClass('highlighted');
        }
        current += chunksize;
        // schedules the next chunk after a short break so the browser can repaint
        if (current < elems.length) setTimeout(step, 10);
    };
}
// acquire and execute the closure function
updatePartial(jQuery("selector").get(), 100)();
(Tested at http://jsfiddle.net/fPdAg/)
Closures are an elegant way to avoid the global variables that other implementations would need.
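For comparison, a sketch of the same chunking without a closure; the progress state would have to live in global variables:
// without the closure, the counter and element list leak into the global scope
var elems = jQuery("selector").get();
var current = 0;
function step() {
    for (var i = 0; i < 100 && current + i < elems.length; i++) {
        jQuery(elems[current + i]).addClass('highlighted');
    }
    current += 100;
    if (current < elems.length) setTimeout(step, 10);
}
step();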
EDIT: A generalized version of the above would be this:
// prepares a closure function that handles the affected elements in chunks of n
function updatePartial(elems, chunksize, payload) {
    var current = 0;
    return function step() {
        // applies the payload function to up to chunksize elements,
        // guarding against running past the end of the array
        for (var i = 0; i < chunksize && current + i < elems.length; i++) {
            payload.call(elems[current + i]);
        }
        current += chunksize;
        // schedules the next chunk after a short break so the browser can repaint
        if (current < elems.length) setTimeout(step, 10);
    };
}
// acquire and execute the closure function, supplying a custom payload function
updatePartial(jQuery("selector").get(), 100, function() {
    jQuery(this).addClass('highlighted');
})();
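Because the payload is just a function applied to each element, you can plug in any per-element work. A sketch with a different payload, fading the elements out instead:
// same chunked traversal, different payload
updatePartial(jQuery("selector").get(), 100, function() {
    jQuery(this).fadeOut();
})();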