The algorithm takes weighted data which must add up to 1. To generate probability from each objects cumulative sum. They then have to be tested against a random number to create some variance to serve and recalculate the weights later on. I am looking to try to optimize what i have written as there are a few levels of nested loops.

DEMO: http://jsfiddle.net/81o4zjwL/

Test Case: http://jsfiddle.net/rnhp5v2w/

`function weightedSort(data, productLimit) { var products = []; var output = []; var dataSum = 0; var sum = 0; var sortedData; var randomNumber; var numberToPop; var noOfproducts = productLimit; var j; var k; var l; var i; var x; // rescale weights if not equal to 1 for (k = 0; k < data.length; k++) { dataSum = dataSum + data[k].weight; } for (j = 0; j < data.length; j++) { data[j].weight = data[j].weight / dataSum; } // sort weights from heigh to low sortedData = data.sort(function (a, b) { return parseFloat(a.weight) - parseFloat(b.weight); }); sortedData.reverse(); output = sortedData; for (i = 0; i < noOfproducts; i++) { // generate random number randomNumber = Math.random(); // create cumulative distribution from weights for (x in output) { sum = sum + output[x].weight; // grab the new output and regenerate the cumulative sums output[x].cumul_sum = sum; } for (l = 0; l < output.length; l++) { // for cumulative weights greater than random number if (output[l].cumul_sum > randomNumber) { // take larger number numberToPop = output[l]; // push into product rotation products.push(numberToPop); // pop from output output.splice(l, 1); // rescale remaining weights up to 1 (divide each remaining weight by (1 - poped weight)) for (j = 0; j < output.length; j++) { output[j].weight = output[j].weight / (1 - numberToPop.weight); } // break out of loop, and recalculate to pull new product break; } } // reset dataSum = 0; sum = 0; } return products; } `