Export All from DataTables with Server Side processing?
I have this mostly working. It now times out on large reports, but that's a separate issue caused by the data size, not by the approach itself; for small datasets it works perfectly.
This is how I create the buttons (it's the custom export button that matters here):
"buttons": [{
extend: 'collection',
text: 'Selection',
buttons: ['selectAll', 'selectNone']
}, {
extend: 'collection',
text: 'Export',
buttons: ['export', 'excel', 'csv', 'pdf', { extend: 'excel',
text: 'Export Current Page',
exportOptions: {
modifier: {
page: 'current'
}
},
customize: function (xlsx)
{
var sheet = xlsx.xl.worksheets['sheet1.xml'];
$('row:first c', sheet).attr('s', '7');
}
}]
}
]
This is the definition of the custom 'export' button referenced above:
$.fn.dataTable.ext.buttons.export = {
    className: 'buttons-alert',
    id: 'ExportButton',
    text: "Export All Test III",
    action: function (e, dt, node, config) {
        var SearchData = dt.rows({ filter: 'applied' }).data();
        var SearchData1 = dt.search();
        console.log(SearchData);
        var OrderData = dt.order();
        console.log(SearchData1);
        var NumCol = SearchData[0].length;
        var NumRow = SearchData.length;
        var SearchData2 = [];
        // Strip the scrollable div wrappers from each cell value
        for (var j = 0; j < NumRow; j++) {
            var NewSearchData = SearchData[j];
            for (var i = 0; i < NewSearchData.length; i++) {
                NewSearchData[i] = NewSearchData[i].replace("<div class='Scrollable'>", "");
                NewSearchData[i] = NewSearchData[i].replace("</div>", "");
            }
            SearchData2.push([NewSearchData]);
        }
        // Flatten each row into a '::'-delimited string
        for (var i = 0; i < SearchData2.length; i++) {
            for (var j = 0; j < SearchData2[i].length; j++) {
                SearchData2[i][j] = SearchData2[i][j].join('::');
            }
        }
        SearchData2 = SearchData2.join("%%");
        // Note: SearchData2 is never actually sent; the export relies on the
        // criteria kept server-side (see the KeepPost cookie below).
        window.location.href = './ServerSide.php?ExportToExcel=Yes';
    }
};
And here is the part of ServerSide.php that picks up the saved criteria and hands them off for processing:
require('FilterSort.class.php');
if (isset($_GET['ExportToExcel']) && $_GET['ExportToExcel'] == 'Yes')
{
    $request = @unserialize($_COOKIE['KeepPost']);
    $DataReturn = json_encode(FilterSort::complex($request, $sqlConnect, $table, $primaryKey, $ColumnHeader));
    require './ExportAllToExcel.php';
}
else
{
    echo json_encode(FilterSort::complex($request, $sqlConnect, $table, $primaryKey, $ColumnHeader));
}
This is how I set the cookie that I use to keep the search and sort criteria:
if (isset($_POST['draw']))
{
    $KeepPost = $_POST;
    $KeepPost['length'] = -1;
    $PostKept = serialize($KeepPost);
    setcookie("KeepPost", $PostKept, time() + (60*60*24*7));
}
All of this combined sends the correct criteria to FilterSort.class.php, which processes them and returns the dataset to ExportAllToExcel.php, which then creates the Excel file. Right now, though, it times out on the huge reports I'm sending it.
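For context on that timeout: the usual first suspects are PHP's execution-time and memory limits, which can be raised for just the export request. A minimal sketch of the overrides that could sit at the top of the export branch (values are illustrative only, not my actual code):

// Sketch only: relax PHP's limits for the export request so a long-running
// query / workbook build is not killed part-way through.
set_time_limit(0);                // no max execution time for this request
ini_set('memory_limit', '1024M'); // room for a large result set in memory
ignore_user_abort(true);          // keep going even if the browser gives up

Web-server and proxy timeouts can still cut the request off, so this raises the ceiling rather than removing it.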
UPDATE
I have slightly changed the way that I do this:
Here is the new set of buttons:
"buttons": [{
extend: 'collection',
text: 'Export',
buttons: ['export', { extend: 'csv',
text: 'Export All To CSV', //Export all to CSV file
action: function (e, dt, node, config)
{
window.location.href = './ServerSide.php?ExportToCSV=Yes';
}
}, 'csv', 'pdf', { extend: 'excel',
text: 'Export Current Page', //Export to Excel only the current page and highlight the first row as headers
exportOptions: {
modifier: {
page: 'current'
}
},
customize: function (xlsx)
{
var sheet = xlsx.xl.worksheets['sheet1.xml'];
$('row:first c', sheet).attr('s', '7');
}
}]
}
]
Here is how I create the Export All to Excel button:
$.fn.dataTable.ext.buttons.export = {
    className: 'buttons-alert', // Adds the "Export All To Excel" button
    id: 'ExportButton',
    text: "Export All To Excel",
    action: function (e, dt, node, config) {
        window.location.href = './ServerSide.php?ExportToExcel=Yes';
    }
};
These now send the data to the same ServerSide.php file that I was using before:
require('FilterSort.class.php');
if (isset($_GET['ExportToExcel']) && $_GET['ExportToExcel'] == 'Yes')
{
    include 'Helper/LogReport.php';
    $GetSQL = "Select Value from PostKept where UserName = '" . $_COOKIE['UserName'] . "'";
    $KeepResult = $conn->query($GetSQL);
    $KeepResults = $KeepResult->fetchAll(PDO::FETCH_ASSOC);
    $request = unserialize($KeepResults[0]['Value']);
    $DataReturn = json_encode(FilterSort::complex($request, $sqlConnect, $table, $primaryKey, $ColumnHeader, 1));
    require './ExportAllToExcel.php';
} // The else branch (a normal DataTables draw) is unchanged from the version above.
I have also changed the way that I keep the query: it now also stores the table name and username, like this:
include 'DBConn.php';
$KeepPost = $_POST;               // POST holds all the data for the search
$KeepPost['length'] = -1;         // -1 means pulling the whole table
$PostKept = serialize($KeepPost); // Turn the array of data into a string for storage in SQL
$SQLCheck = "select distinct UserName from PostKept"; // Get all the distinct usernames of users that have used the Report Dashboard
$sth = $conn->query($SQLCheck);
$CheckedUser = $sth->fetchAll(PDO::FETCH_ASSOC);
$FoundUsers = array();
foreach ($CheckedUser as $User)
{
    foreach ($User as $Index => $Who)
    {
        $FoundUsers[] = $Who; // Collect the found users into a simpler array for searching later
    }
}
if (isset($_COOKIE['UserName']) && in_array($_COOKIE['UserName'], $FoundUsers)) // If the user already has an entry, update it with the new information
{
    $TSQL = "UPDATE PostKept set Value = '" . $PostKept . "', TableName = '" . $TableName . "' where UserName = '" . $_COOKIE['UserName'] . "'";
}
else
{
    if (isset($_COOKIE['UserName'])) // If this is a new user
    {
        $TSQL = "INSERT into PostKept(Value, TableName, UserName) select '" . $PostKept . "','" . $TableName . "','" . $_COOKIE['UserName'] . "'";
    }
    else // If this is on the Prod site and the user info is not yet kept
    {
        $TSQL = "INSERT into PostKept(Value, TableName) select '" . $PostKept . "','" . $TableName . "'";
    }
}
$sth = $conn->prepare($TSQL);
$sth->execute();
All of this now combines to send the data to my ExportAllToExcel.php file, which in turn creates the file.
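Since ExportAllToExcel.php itself isn't shown above, here is a stripped-down sketch of an export script that consumes $DataReturn. The 'data' key and the CSV output are assumptions for illustration; the real file builds an Excel workbook from whatever FilterSort::complex returns.

<?php
// ExportAllToExcel.php - stripped-down sketch, not the real file.
// Assumes $DataReturn is the JSON string built in ServerSide.php and that the
// decoded payload holds a 'data' array of rows.
$payload = json_decode($DataReturn, true);
$rows    = isset($payload['data']) ? $payload['data'] : $payload;

header('Content-Type: text/csv; charset=utf-8');
header('Content-Disposition: attachment; filename="ExportAll.csv"');

$out = fopen('php://output', 'w'); // stream straight to the browser
foreach ($rows as $row) {
    fputcsv($out, (array) $row);   // one line per row, no big buffer
}
fclose($out);
exit;

Streaming the rows straight to php://output keeps memory use flat, which matters as much as the time limit for very large reports.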
First, add the following code to the DataTable initialization:
"dom": 'Blfrtip',
"buttons": [
{
"extend": 'excel',
"text": '<button class="btn"><i class="fa fa-file-excel-o" style="color: green;"></i> Excel</button>',
"titleAttr": 'Excel',
"action": newexportaction
},
],
Then add this function inside the $(document).ready() function:
function newexportaction(e, dt, button, config) {
    var self = this;
    var oldStart = dt.settings()[0]._iDisplayStart;
    dt.one('preXhr', function (e, s, data) {
        // Just this once, load all data from the server...
        data.start = 0;
        data.length = 2147483647;
        dt.one('preDraw', function (e, settings) {
            // Call the original action function
            if (button[0].className.indexOf('buttons-copy') >= 0) {
                $.fn.dataTable.ext.buttons.copyHtml5.action.call(self, e, dt, button, config);
            } else if (button[0].className.indexOf('buttons-excel') >= 0) {
                $.fn.dataTable.ext.buttons.excelHtml5.available(dt, config) ?
                    $.fn.dataTable.ext.buttons.excelHtml5.action.call(self, e, dt, button, config) :
                    $.fn.dataTable.ext.buttons.excelFlash.action.call(self, e, dt, button, config);
            } else if (button[0].className.indexOf('buttons-csv') >= 0) {
                $.fn.dataTable.ext.buttons.csvHtml5.available(dt, config) ?
                    $.fn.dataTable.ext.buttons.csvHtml5.action.call(self, e, dt, button, config) :
                    $.fn.dataTable.ext.buttons.csvFlash.action.call(self, e, dt, button, config);
            } else if (button[0].className.indexOf('buttons-pdf') >= 0) {
                $.fn.dataTable.ext.buttons.pdfHtml5.available(dt, config) ?
                    $.fn.dataTable.ext.buttons.pdfHtml5.action.call(self, e, dt, button, config) :
                    $.fn.dataTable.ext.buttons.pdfFlash.action.call(self, e, dt, button, config);
            } else if (button[0].className.indexOf('buttons-print') >= 0) {
                $.fn.dataTable.ext.buttons.print.action(e, dt, button, config);
            }
            dt.one('preXhr', function (e, s, data) {
                // DataTables thinks the first item displayed is index 0, but we're not drawing that.
                // Set the property to what it was before exporting.
                settings._iDisplayStart = oldStart;
                data.start = oldStart;
            });
            // Reload the grid with the original page. Otherwise, API functions like table.cell(this) don't work properly.
            setTimeout(dt.ajax.reload, 0);
            // Prevent rendering of the full data to the DOM
            return false;
        });
    });
    // Requery the server with the new one-time export settings
    dt.ajax.reload();
}
I just ran into this and came up with an alternate solution.
In the DataTable options, add this:
"lengthMenu": [[10, 25, 50, -1], [10, 25, 50, "All"]]
This will allow the user to select all rows and will send -1 to the server in the 'length' query string parameter. On the server side, you need to handle the negative number and return all rows when -1 is received.
This displays all rows in the table and exports all of them.
I understand that this may not be suitable for 50-60K rows, but for a smaller dataset it can work without any additional code on either the server or the client side.
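If your server-side script builds its own LIMIT clause by hand, the -1 does need a small accommodation. A rough sketch in PHP, with placeholder table and variable names:

// Sketch: only add a LIMIT clause when a real page length was requested.
// $conn is a PDO connection; "MyTable" is a placeholder.
$start  = isset($_POST['start'])  ? (int) $_POST['start']  : 0;
$length = isset($_POST['length']) ? (int) $_POST['length'] : 10;

$sql = "SELECT * FROM MyTable";
if ($length > -1) {
    $sql .= " LIMIT " . $start . ", " . $length; // normal paged draw
}
// $length === -1 -> no LIMIT clause, so every matching row comes back
$rows = $conn->query($sql)->fetchAll(PDO::FETCH_ASSOC);

The example ssp.class.php that ships with DataTables already performs this check, which is why the stock server-side examples need no extra code for it.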