Fast number of rows with big tables in PostgreSQL (thanks to juzna)

This commit is contained in:
Jakub Vrana 2011-07-29 17:08:06 +02:00
parent 2e1d38a920
commit c82829942c
7 changed files with 36 additions and 2 deletions

View file

@ -490,6 +490,10 @@ WHERE OBJECT_NAME(i.object_id) = " . q($table)
return $return;
}
/** Get approximate number of rows
* @param array table status row
* @param array where conditions
* @return bool false - no fast row-count approximation is implemented for this driver
*/
function found_rows($table_status, $where) {
return false;
}
function foreign_keys($table) {
$return = array();
foreach (get_rows("EXEC sp_fkeys @fktable_name = " . q($table)) as $row) {

View file

@ -822,6 +822,15 @@ if (!defined("DRIVER")) {
return $connection->query("EXPLAIN $query");
}
/** Get approximate number of rows
* @param array table status row (uses "Engine" and "Rows")
* @param array where conditions
* @return int or null if approximate number can't be retrieved
*/
function found_rows($table_status, $where) {
	// A WHERE clause invalidates the table-level statistics.
	if ($where) {
		return null;
	}
	// Only InnoDB row estimates are worth using here; other engines
	// report exact counts cheaply elsewhere.
	if ($table_status["Engine"] != "InnoDB") {
		return null;
	}
	return $table_status["Rows"];
}
/** Get user defined types
* @return array
*/

View file

@ -275,6 +275,10 @@ ORDER BY uc.constraint_type, uic.column_position", $connection2) as $row) {
return $connection->query("SELECT * FROM plan_table");
}
/** Get approximate number of rows
* @param array table status row
* @param array where conditions
* @return bool false - no fast row-count approximation is implemented for this driver
*/
function found_rows($table_status, $where) {
return false;
}
function alter_table($table, $name, $fields, $foreign, $comment, $engine, $collation, $auto_increment, $partitioning) {
$alter = $drop = array();
foreach ($fields as $field) {

View file

@ -511,6 +511,18 @@ ORDER BY p.proname');
return $connection->query("EXPLAIN $query");
}
/** Get approximate number of rows from the planner's EXPLAIN estimate
* @param array table status row (uses "Name")
* @param array where conditions, AND-ed together
* @return mixed estimated row count (numeric string) or false if it can't be parsed
*/
function found_rows($table_status, $where) {
	global $connection;
	// Grab the planner output; EXPLAIN is cheap compared to COUNT(*) on big tables.
	$explain = $connection->result("EXPLAIN SELECT * FROM " . idf_escape($table_status["Name"]) . ($where ? " WHERE " . implode(" AND ", $where) : ""));
	// ereg() was deprecated in PHP 5.3 and removed in 7.0 - preg_match() is the
	// drop-in replacement for this simple pattern.
	if (preg_match('~ rows=([0-9]+)~', $explain, $regs)) {
		return $regs[1];
	}
	return false;
}
function types() {
return get_vals("SELECT typname
FROM pg_type

View file

@ -503,6 +503,10 @@ if (isset($_GET["sqlite"]) || isset($_GET["sqlite2"])) {
return $connection->query("EXPLAIN $query");
}
/** Get approximate number of rows
* @param array table status row
* @param array where conditions
* @return bool false - no fast row-count approximation is implemented for this driver
*/
function found_rows($table_status, $where) {
return false;
}
/** Get user defined types
* @return array empty - this driver exposes no user defined types
*/
function types() {
return array();
}

View file

@ -373,8 +373,8 @@ if (!$columns) {
if ($rows || $page) {
$exact_count = true;
if ($_GET["page"] != "last" && +$limit && count($group) >= count($select) && ($found_rows >= $limit || $page)) {
$found_rows = $table_status["Rows"];
if (!isset($found_rows) || $where || ($table_status["Engine"] == "InnoDB" && $found_rows < max(1e4, 2 * ($page + 1) * $limit))) {
$found_rows = found_rows($table_status, $where);
if ($found_rows < max(1e4, 2 * ($page + 1) * $limit)) {
// slow with big tables
ob_flush(); //! doesn't work with AJAX
flush();

View file

@ -1,5 +1,6 @@
Adminer 3.3.2-dev:
Don't scroll with AJAX select order and alter move column
Fast number of rows with big tables (PostgreSQL)
Adminer 3.3.1 (released 2011-07-27):
Fix XSS introduced in Adminer 3.2.0