Select the table of the database that you want to export and click the Export tab on the right side. Select the CSV format from the Format drop-down list and click the Go button. Select the Save File option and press OK; the file will be downloaded to your Downloads folder.
I personally use this function to create CSV content from any array.
function array2csv(array &$array)
{
    if (count($array) == 0) {
        return null;
    }
    ob_start();
    $df = fopen("php://output", 'w');
    // Use the keys of the first row as the CSV header.
    fputcsv($df, array_keys(reset($array)));
    foreach ($array as $row) {
        fputcsv($df, $row);
    }
    fclose($df);
    // Return the buffered output as a CSV string.
    return ob_get_clean();
}
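For example, with a small associative array (the field names below are made up purely for illustration), the function returns a ready-to-echo CSV string:
$array = array(
    array('first_name' => 'Alice', 'last_name' => 'Smith'),
    array('first_name' => 'Bob',   'last_name' => 'Jones'),
);

// Prints:
// first_name,last_name
// Alice,Smith
// Bob,Jones
echo array2csv($array);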
Then you can make your user download that file using something like:
function download_send_headers($filename) {
    // disable caching
    $now = gmdate("D, d M Y H:i:s");
    header("Expires: Tue, 03 Jul 2001 06:00:00 GMT");
    header("Cache-Control: max-age=0, no-cache, must-revalidate, proxy-revalidate");
    header("Last-Modified: {$now} GMT");

    // force download
    header("Content-Type: application/force-download");
    header("Content-Type: application/octet-stream");
    header("Content-Type: application/download");

    // disposition / encoding on response body
    header("Content-Disposition: attachment;filename={$filename}");
    header("Content-Transfer-Encoding: binary");
}
Usage example:
download_send_headers("data_export_" . date("Y-m-d") . ".csv");
echo array2csv($array);
die();
You can export the data using this script.
<?php
$list = array(
    array('aaa', 'bbb', 'ccc', 'dddd'),
    array('123', '456', '789'),
    array('"aaa"', '"bbb"')
);

$fp = fopen('file.csv', 'w');
foreach ($list as $fields) {
    fputcsv($fp, $fields);
}
fclose($fp);
?>
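Assuming fputcsv's default delimiter and enclosure, file.csv should end up looking like the lines below; fields that already contain double quotes get enclosed and their inner quotes doubled:
aaa,bbb,ccc,dddd
123,456,789
"""aaa""","""bbb"""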
First you must load the data from the MySQL server into an array, for example with the sketch below.
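A minimal sketch using PDO: the DSN, credentials, and the users table are placeholders, so adjust them to your own schema before combining this with array2csv() and download_send_headers() from above.
<?php
// Placeholder DSN, credentials and table name -- adjust to your own schema.
$pdo = new PDO('mysql:host=localhost;dbname=mydb;charset=utf8mb4', 'db_user', 'db_password', array(
    PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
));

// Fetch the rows as associative arrays so the column names become the CSV header.
$stmt  = $pdo->query('SELECT id, first_name, last_name, email FROM users');
$array = $stmt->fetchAll(PDO::FETCH_ASSOC);

download_send_headers("data_export_" . date("Y-m-d") . ".csv");
echo array2csv($array);
die();
?>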
Just for the record, concatenation is way faster (I mean it) than fputcsv, or even implode, and the file size is smaller:
// The data from Eternal Oblivion is an object, always
$values = (array) fetchDataFromEternalOblivion($userId, $limit = 1000);

// ----- fputcsv (slow)
// The code of @Alain Tiemblo is the best implementation
ob_start();
$csv = fopen("php://output", 'w');
fputcsv($csv, array_keys(reset($values)));
foreach ($values as $row) {
    fputcsv($csv, $row);
}
fclose($csv);
return ob_get_clean();

// ----- implode (slow, but file size is smaller)
$csv = implode(",", array_keys(reset($values))) . PHP_EOL;
foreach ($values as $row) {
    $csv .= '"' . implode('","', $row) . '"' . PHP_EOL;
}
return $csv;

// ----- concatenation (fast, file size is smaller)
// We can use one implode for the headers =D
$csv = implode(",", array_keys(reset($values))) . PHP_EOL;
// This is less flexible, but we have more control over the formatting
foreach ($values as $row) {
    $csv .= '"' . $row['id'] . '",';
    $csv .= '"' . $row['name'] . '",';
    $csv .= '"' . date('d-m-Y', strtotime($row['date'])) . '",';
    $csv .= '"' . ($row['pet_name'] ?: '-') . '"';
    $csv .= PHP_EOL;
}
return $csv;
This is the conclusion of optimizing several reports, ranging from ten to thousands of rows. All three examples worked fine under 1,000 rows, but began to fail as the data grew bigger.
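If you want to check the speed claim against your own data, a rough timing sketch along these lines works; the generated rows here are placeholders, not real data.
<?php
// Build some throwaway rows purely for timing purposes.
$values = array();
for ($i = 0; $i < 50000; $i++) {
    $values[] = array('id' => $i, 'name' => 'user' . $i, 'date' => '2020-01-01', 'pet_name' => '');
}

// Time fputcsv writing to an in-memory stream.
$start = microtime(true);
$fp = fopen('php://temp', 'w');
foreach ($values as $row) {
    fputcsv($fp, $row);
}
fclose($fp);
printf("fputcsv:       %.3f s\n", microtime(true) - $start);

// Time plain string concatenation.
$start = microtime(true);
$csv = '';
foreach ($values as $row) {
    $csv .= '"' . $row['id'] . '","' . $row['name'] . '","' . $row['date'] . '","' . ($row['pet_name'] ?: '-') . '"' . PHP_EOL;
}
printf("concatenation: %.3f s\n", microtime(true) - $start);
?>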
I recommend parsecsv-for-php to get around a number of issues with nested newlines and quotes.
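A minimal parsing sketch, assuming the library's classic parseCSV class and its auto() method; check the project's README for the namespace used in current releases.
require 'vendor/autoload.php';

$csv = new parseCSV();
$csv->auto('file.csv');   // detect the delimiter and parse the whole file
print_r($csv->data);      // rows as associative arrays keyed by the header row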