SlickGrid Virtual Scrolling - Huge dataset

Submitted by 半腔热情 on 2019-12-10 23:56:41

Question


I was able to extend the Ajax-Loading Example to suit my requirement.

In the current implementation, the loader in slick.remotemodel.js has a variable called data, which is an array of JavaScript objects. This array is the data source for the grid. As we keep scrolling, the JavaScript array keeps growing, even though the DOM only ever has a page's worth of rows rendered.

What is the upper limit on the size of a JavaScript object? Is there a possibility of running out of memory? Let's say my dataset is on the order of ~125,000 rows with ~10 columns. I realize more information might be required to pin down the size of the dataset, but with the above info, can somebody provide some input?
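
For a rough sense of scale (an assumption-laden back-of-the-envelope, not a measurement): a plain object with ~10 short fields typically costs on the order of a few hundred bytes in most JavaScript engines, so caching all ~125,000 rows would land somewhere in the tens of megabytes:

// Back-of-the-envelope only; the real per-row cost depends on the engine
// and on the field values, so treat these numbers as assumptions.
var rowCount = 125000;         // full dataset size
var assumedBytesPerRow = 500;  // assumed average for an object with ~10 fields
var approxMB = (rowCount * assumedBytesPerRow) / (1024 * 1024);
console.log("~" + approxMB.toFixed(0) + " MB if every row stays cached"); // ~60 MB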

To handle the above scenario, I updated the code; please see my edit below and let me know if I'm missing anything.

Edit: My solution was to call the clear method in the onSuccess method to make sure the data array holds only PAGESIZE items at any given time.

(function ($) {
    /***
    * Ajax loading example which is an extension
    * of the http://mleibman.github.com/SlickGrid/examples/example6-ajax-loading.html
    * example.
    */
    function RemoteModel() {
        // private

        var fromPage = 0;         // first page of the pending request
        var rows = 0;             // number of rows spanned by the pending request

        var PAGESIZE = 250;       // rows fetched per request (client-side cache size)
        var data = { length: 0 }; // sparse row cache; length is the total row count
        var h_request = null;     // debounce timer handle
        var req = null;           // ajax request

        // events
        var onDataLoading = new Slick.Event();
        var onDataLoaded = new Slick.Event();


        function init() {
        }


        function isDataLoaded(from, to) {
            for (var i = from; i <= to; i++) {
                if (data[i] == undefined || data[i] == null)
                    return false;
            }

            return true;
        }


        function clear() {
            for (var key in data) {
                delete data[key];
            }
            data.length = 0;
        }


        function ensureData(from, to) {

            // If a request is already in flight, abort it and reset the
            // "loading" placeholders for the pages it covered.
            if (req) {
                req.abort();

                for (var i = req.fromPage; i <= req.toPage; i++)
                    data[i * PAGESIZE] = undefined;
            }

            if (from < 0)
                from = 0;

            // Translate the requested row range into page numbers; the loops
            // below skip pages that are already cached at either end.
            fromPage = Math.floor(from / PAGESIZE);
            var toPage = Math.floor(to / PAGESIZE);

            while (data[fromPage * PAGESIZE] !== undefined && fromPage < toPage)
                fromPage++;

            while (data[toPage * PAGESIZE] !== undefined && fromPage < toPage)
                toPage--;

            rows = (((toPage - fromPage) * PAGESIZE) + PAGESIZE);

            if (fromPage > toPage || ((fromPage == toPage) && data[fromPage * PAGESIZE] !== undefined)) {

                // TODO:  look-ahead
                return;
            }

            var url = "" ; // IMPORTANT : you should set this to your url which returns the data

            // Debounce: if a timer is already pending, reset it so that rapid
            // scrolling does not fire one request per viewport change.
            if (h_request != null) {
                clearTimeout(h_request);
            }

            h_request = setTimeout(function () {

                for (var i = fromPage; i <= toPage; i++)
                    data[i * PAGESIZE] = null; // null indicates a 'requested but not available yet'

                onDataLoading.notify({ from: from, to: to });

                req = $.ajax({
                    url: url,
                    dataType: 'json',
                    success: function (response) {
                        onSuccess(response);
                    },
                    error: function () {
                        onError(fromPage, toPage);
                    }
                });


                req.fromPage = fromPage;
                req.toPage = toPage;

            }, 100);
        }


        function onError(fromPage, toPage) {
            alert("error loading pages " + fromPage + " to " + toPage);
        }

        function onSuccess(response) {

            // Solution to keep the data array bounded to PAGESIZE: call clear()
            // so that only PAGESIZE elements are cached at any given point.
            clear();

            // The visible number of rows in the viewport may only be ~20, but a
            // full PAGESIZE (250 here) is populated to act as a client-side
            // cache and avoid too many server round-trips.
            var from = fromPage * PAGESIZE, to = from + PAGESIZE;

            // Keep the total row count so the grid's scroll range still covers
            // the whole dataset even though only one page of rows is cached.
            data.length = response.count;

            for (var i = 0; i < response.Fields.length; i++) {
                data[from + i] = response.Fields[i];
                data[from + i].index = from + i;
            }

            req = null;

            onDataLoaded.notify({ from: from, to: to });
        }


        function reloadData(from, to) {
            for (var i = from; i <= to; i++)
                delete data[i];

            ensureData(from, to);
        }


        init();

        return {
            // properties
            "data": data,

            // methods
            "clear": clear,
            "isDataLoaded": isDataLoaded,
            "ensureData": ensureData,
            "reloadData": reloadData,

            // events
            "onDataLoading": onDataLoading,
            "onDataLoaded": onDataLoaded
        };
    }

    // Slick.Data.RemoteModel
    $.extend(true, window, { Slick: { Data: { RemoteModel: RemoteModel}} });
})(jQuery);
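
For completeness, this is roughly how the loader gets wired to the grid in the original example6-ajax-loading page; the variable names (grid, loader, columns, options) and the "#myGrid" selector are assumptions carried over from that example, not part of the code above:

// Sketch of the grid/loader wiring, following the example6 pattern.
var loader = new Slick.Data.RemoteModel();
var grid = new Slick.Grid("#myGrid", loader.data, columns, options);

// Ask the loader for whatever range the viewport currently shows.
grid.onViewportChanged.subscribe(function (e, args) {
    var vp = grid.getViewport();
    loader.ensureData(vp.top, vp.bottom);
});

// Re-render the rows that just arrived.
loader.onDataLoaded.subscribe(function (e, args) {
    for (var i = args.from; i <= args.to; i++) {
        grid.invalidateRow(i);
    }
    grid.updateRowCount();
    grid.render();
});

// Trigger the initial load.
grid.onViewportChanged.notify();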

Thanks


Answer 1:


My solution was to call the clear method in the onSuccess method to make sure the data array holds only PAGESIZE items. This keeps only the data that is currently required in the JavaScript object array; everything else is set back to undefined.
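
To illustrate what the cache looks like after this change (the values below are assumed, with PAGESIZE = 250 and the viewport sitting somewhere in rows 1000-1249):

// State of loader.data after clear() + onSuccess() for that page:
loader.data.length;   // 125000    - total count, so the scrollbar still spans the full dataset
loader.data[1100];    // row object - inside the one page that is currently cached
loader.data[0];       // undefined  - evicted by clear()
loader.data[50000];   // undefined  - ensureData() will request it again when scrolled into view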



Source: https://stackoverflow.com/questions/7149639/slikgrid-virtual-scrolling-huge-dataset
