I have a scenario where I get thousands of records from the server as JSON and bind all of them to the page. For each record I do some calculations in jQuery and then bind the data to the UI. Because the record count is in the thousands, the calculation and binding take a long time, and the data only appears on the page at the very end, once all records have been processed. Is there any option to bind the data one by one, or ten by ten, and show each set on the UI as soon as it is ready? What I am trying to find is a way to execute $.each for 10 records at a time, append the next set of 10 records to the result, and so on. Any idea to make the page load faster? (Paging is not required for my requirement.) Any clue can help.
<div id="keepFinalDataHere"></div>
var content = "";
$.each(data, function (i, record) {
    content += "<div>" + record.id + "</div><div>" + record.fromId + "</div><div>" + record.subject + "</div>";
});
$(content).appendTo('#keepFinalDataHere');
In the above code, content is built from several thousand records, and only once the whole string is built is it bound to the div. I am looking for a way to bind the first 10 items immediately, so that users feel the page has loaded, and then append the remaining items to the existing list in sets of 100 or so.
A simple way is to do it in chunks.
<div id="keepFinalDataHere"></div>
<script>
//.../
var chunkSize = 50;//what ever you want or could be dynamic based on data size
var $keepFinalDataHere = $('#keepFinalDataHere');
$.each(data, function (i, record) {
content += "<div>" + record.id + "</div><div>" + record.fromId + "</div><div>" + record.subject + "</div>";
if(i % chunkSize === 0){ // content chunk is ready
$keepFinalDataHere.append(content); // show records
content = '';//reset the content
}
});
if(!(content === '')){//any leftOver records
$keepFinalDataHere.append(content);
}
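If you also want the browser to get a chance to paint between chunks, a variation of the same idea (my own sketch, not part of the code above) is to schedule each chunk with setTimeout. It assumes data is the record array from the question:

var chunkSize = 50;
var index = 0;
var $keepFinalDataHere = $('#keepFinalDataHere');

function appendNextChunk() {
    var content = '';
    var end = Math.min(index + chunkSize, data.length);

    // Build the markup for just this chunk
    for (; index < end; index++) {
        content += "<div>" + data[index].id + "</div><div>" + data[index].fromId + "</div><div>" + data[index].subject + "</div>";
    }

    $keepFinalDataHere.append(content); // show this chunk right away

    if (index < data.length) {
        setTimeout(appendNextChunk); // yield to the browser, then continue
    }
}

appendNextChunk();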
If you want to keep the UI responsive and be able to execute code in between rendering a large number of DOM elements, you'll have to use a timeout mechanism. You can do so by passing your render method to setTimeout. Instead of adding the method to the stack and executing it immediately, setTimeout pushes the method to a task queue and only executes it once the current JS stack has cleared.
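A trivial illustration of that queueing behaviour (not part of the rendering code):

console.log("before");
setTimeout(function () {
    // Runs only after the current stack has cleared, even with no delay given
    console.log("deferred");
});
console.log("after");
// Logs: before, after, deferred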
The main steps of the method I propose:
- splice to remove the first n items from the array
- render those n items to the DOM
- if anything is left in the array, schedule the next chunk with setTimeout

Here's the main part of the code, with comments, assuming:
- testData holds an array of data points
- createRow holds the logic to transform a data point into a rendered DOM element
- INITIAL_CHUNK_SIZE holds the number of rows you want to render without a timeout
- DEFAULT_CHUNK_SIZE holds the number of rows each following loop has to render

The timed-out renderer (toRenderer):
var toRenderer = function() {
    // We need a copy because `splice` mutates an array
    var dataBuffer = [].concat(testData);

    var nextRender = function(s) {
        // Default value that can be overridden
        var chunkSize = s || DEFAULT_CHUNK_SIZE;

        dataBuffer
            .splice(0, chunkSize)
            .forEach(createRow);

        if (dataBuffer.length) {
            setTimeout(nextRender);
        }
    };

    // Triggers the initial (not timed out) render
    nextRender(INITIAL_CHUNK_SIZE);
};
In the example below I've included a moving spinner to show how the render loop is able to hold a decent frame rate.
Note that the larger the DEFAULT_CHUNK_SIZE, the faster all your items are rendered. The tradeoff: once a single render chunk takes more than 1/60th of a second, you'll lose the smooth frame rate. (A sketch of adapting the chunk size to that budget follows the full snippet.)
// SETTINGS
var DATA_LENGTH = 10000;
var DEFAULT_CHUNK_SIZE = 100;
var INITIAL_CHUNK_SIZE = 10;
var list = document.querySelector("ul");
var createRow = function(data) {
    var div = document.createElement("div");
    div.innerHTML = data;
    list.appendChild(div);
};
// Blocking until all rows are rendered
var bruteRenderer = function() {
    console.time("Brute renderer total time:");
    testData.forEach(createRow);
    console.timeEnd("Brute renderer total time:");
};
// Pushes "render assignments" to the "task que"
var toRenderer = function(s) {
console.time("Timeout renderer total time:");
var dataBuffer = [].concat(testData);
var nextRender = function(s) {
var chunkSize = s || DEFAULT_CHUNK_SIZE;
dataBuffer
.splice(0, chunkSize)
.forEach(createRow);
if (dataBuffer.length) {
setTimeout(nextRender);
} else {
console.timeEnd("Timeout renderer total time:");
}
};
nextRender(INITIAL_CHUNK_SIZE);
};
// EXAMPLE DATA, EVENT LISTENERS:
// Generate test data
var testData = (function() {
    var result = [];
    for (var i = 0; i < DATA_LENGTH; i += 1) {
        result.push("Item " + i);
    }
    return result;
}());

var clearList = function() {
    list.innerHTML = "";
};
// Attach buttons
document.querySelector(".js-brute").addEventListener("click", bruteRenderer);
document.querySelector(".js-to").addEventListener("click", toRenderer);
document.querySelector(".js-clear").addEventListener("click", clearList);
button {
    display: inline-block;
    margin-right: .5rem;
}

.spinner {
    background: red;
    border-radius: 50%;
    width: 20px;
    height: 20px;
    animation-duration: 1s;
    animation-timing-function: linear;
    animation-direction: alternate;
    animation-name: move;
    animation-iteration-count: infinite;
}

@keyframes move {
    from {
        transform: translate3d(800%, 0, 0);
    }
    to {
        transform: translate3d(0, 0, 0);
    }
}

ul {
    height: 200px;
    overflow-y: scroll;
    background: #efefef;
    border: 1px solid #ccc;
}
<button class="js-brute">
Inject rows brute force
</button>
<button class="js-to">
Inject rows timeout
</button>
<button class="js-clear">
clear list
</button>
<pre></pre>
<div class="spinner"></div>
<ul>
</ul>
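A possible extension of the tradeoff mentioned above (my own sketch, not part of the original answer): instead of a fixed DEFAULT_CHUNK_SIZE, measure how long each chunk actually takes and grow or shrink the chunk size to stay around a ~16 ms frame budget. The hypothetical adaptiveRenderer below reuses testData, createRow and INITIAL_CHUNK_SIZE from the snippet above.

// Adaptive variant: same splice + setTimeout loop as toRenderer,
// but the chunk size adapts to a rough 16 ms (60 fps) frame budget.
var FRAME_BUDGET_MS = 16;

var adaptiveRenderer = function() {
    var dataBuffer = [].concat(testData);
    var chunkSize = INITIAL_CHUNK_SIZE;

    var nextRender = function() {
        var start = performance.now();

        dataBuffer
            .splice(0, chunkSize)
            .forEach(createRow);

        var elapsed = performance.now() - start;

        // Grow the chunk while we are well under budget, shrink it when we overrun
        if (elapsed < FRAME_BUDGET_MS / 2) {
            chunkSize *= 2;
        } else if (elapsed > FRAME_BUDGET_MS) {
            chunkSize = Math.max(1, Math.floor(chunkSize / 2));
        }

        if (dataBuffer.length) {
            setTimeout(nextRender);
        }
    };

    nextRender();
};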