I prefer to write recursive functions and use a stack for something like this. This will only run success
or error
once at the end of the processing and will fetch each url sequentially. However, fetching them sequentially may make the loading take longer because it does not allow the browser to parallelize the requests! A different variation would be to run all the requests in parallel and just process the results sequentially.
function fetch (urls, success, error) {
  // Walk the list one request at a time: each request is issued only
  // after the previous one has succeeded, so fetching is strictly serial.
  var step = function (remaining) {
    // Base case: nothing left to request -- report overall success.
    if (!remaining.length) {
      success()
      return
    }
    $.ajax({
      url: remaining[0],
      success: function () {
        // Head request done; recurse on the tail of the list.
        step(remaining.slice(1))
      },
      // Any single failure aborts the chain and reports the error.
      error: error
    })
  }
  step(urls)
}
// Example usage -- the "..." is a placeholder for additional URLs, not runnable code.
fetch(["url1.html", "url2.html", ...], function () {
// success: every URL was fetched, in order
}, function () {
// failure: one of the requests errored, aborting the chain
})
As I just typed this up, there may be some small errors, but the concept is sound. There is more that can be done such as passing values/results out, but those are left as an exercise ;-) With the same warning applying, here is a version which just processes the results sequentially -- the requests may be sent in parallel:
// Fire all requests (effectively in parallel) but deliver the collected
// results to `success` in request order, once every response is in.
// `error` is invoked at most once, on the first failing request.
function fetch (urls, success, error) {
  var fetched = 0
  var results = []
  var wasError = false

  // Fix: with an empty list there is nothing to wait for, so report
  // success immediately -- otherwise neither callback would ever fire.
  if (urls.length === 0) {
    success(results)
    return
  }

  function _fetch (i) {
    if (i < urls.length) {
      $.ajax({
        url: urls[i],
        success: function (result) {
          // Store by index so results keep request order even though
          // responses may arrive out of order; report success only
          // when all the results are in.
          results[i] = result
          if (++fetched === urls.length) {
            success(results)
          }
        },
        error: function () {
          // Report only the first failure; later failures are ignored.
          if (!wasError) {
            wasError = true
            error()
          }
        }
      })
      // Re-prime right away so the next request goes out without
      // waiting for this one to finish.
      _fetch(i + 1)
    }
  }
  _fetch(0)
}
// Example usage -- the "..." is a placeholder for additional URLs, not runnable code.
var urls = ["url1.html", "url2.html", ...]
fetch(urls, function (results) {
// results arrive here as one array, in the same order as `urls`
$.each(results, ...)
}, function () {
// error :( -- fired once, on the first request that fails
})
Happy coding.