Description:
`LIMIT` values are not adequately handled in `EXPLAIN`.
The results show that a query with a larger `query_cost` estimate can have a shorter actual execution time.
The test table contains 1,000,000 rows.
Here is the DDL:
```sql
CREATE TABLE `testdo` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `name` varchar(50) DEFAULT NULL,
  `value` int NOT NULL,
  `version` int NOT NULL,
  `created_at` datetime DEFAULT NULL,
  `created_by` varchar(50) DEFAULT NULL,
  `is_deleted` bit(1) DEFAULT NULL,
  `modified_at` datetime DEFAULT NULL,
  `modified_by` varchar(50) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
```
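For reference, the `query_cost` figures quoted with each query below come from `EXPLAIN FORMAT=JSON`. The optimizer's estimate can also be read back after actually executing a statement through the `Last_query_cost` session status variable (note it is only maintained for simple, flat queries, so it reads 0 for SQL 2's derived-table form):
```sql
select * from testdo order by id limit 810000,30;
-- Cost the optimizer computed for the last compiled statement:
SHOW SESSION STATUS LIKE 'Last_query_cost';
```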
Two queries, with timings and the optimizer's cost estimates:
**SQL 1 (time: 465 ms, query_cost: 100873.90)**
```sql
select * from testdo order by id limit 810000,30;
```
Execute the EXPLAIN command:
```sql
explain format=json select * from testdo order by id limit 810000,30;
```
Result:
```json
{
  "query_block": {
    "select_id": 1,
    "cost_info": {
      "query_cost": "100873.90"
    },
    "ordering_operation": {
      "using_filesort": false,
      "table": {
        "table_name": "testdo",
        "access_type": "index",
        "key": "PRIMARY",
        "used_key_parts": [
          "id"
        ],
        "key_length": "8",
        "rows_examined_per_scan": 810030,
        "rows_produced_per_join": 994719,
        "filtered": "100.00",
        "cost_info": {
          "read_cost": "1402.00",
          "eval_cost": "99471.90",
          "prefix_cost": "100873.90",
          "data_read_per_join": "599M"
        },
        "used_columns": [
          "id",
          "name",
          "value",
          "version",
          "created_at",
          "created_by",
          "is_deleted",
          "modified_at",
          "modified_by"
        ]
      }
    }
  }
}
```
EXPLAIN ANALYZE:
```sql
explain ANALYZE select * from testdo order by id limit 810000,30;
```
Result:
```
-> Limit/Offset: 30/810000 row(s) (cost=69914.55 rows=30) (actual time=465.053..465.098 rows=30 loops=1)
    -> Index scan on testdo using PRIMARY (cost=69914.55 rows=810030) (actual time=0.082..444.687 rows=810030 loops=1)
```
**SQL 2 (time: 187 ms, query_cost: 471480.20)**
```sql
select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;
```
Execute the EXPLAIN command:
```sql
explain format=json select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;
```
Result:
```json
{
  "query_block": {
    "select_id": 1,
    "cost_info": {
      "query_cost": "471480.20"
    },
    "nested_loop": [
      {
        "table": {
          "table_name": "a",
          "access_type": "ALL",
          "rows_examined_per_scan": 810030,
          "rows_produced_per_join": 810030,
          "filtered": "100.00",
          "cost_info": {
            "read_cost": "10127.88",
            "eval_cost": "81003.00",
            "prefix_cost": "91130.88",
            "data_read_per_join": "12M"
          },
          "used_columns": [
            "id"
          ],
          "materialized_from_subquery": {
            "using_temporary_table": true,
            "dependent": false,
            "cacheable": true,
            "query_block": {
              "select_id": 2,
              "cost_info": {
                "query_cost": "98323.63"
              },
              "table": {
                "table_name": "testdo",
                "access_type": "index",
                "key": "PRIMARY",
                "used_key_parts": [
                  "id"
                ],
                "key_length": "8",
                "rows_examined_per_scan": 962512,
                "rows_produced_per_join": 962512,
                "filtered": "100.00",
                "using_index": true,
                "cost_info": {
                  "read_cost": "2072.43",
                  "eval_cost": "96251.20",
                  "prefix_cost": "98323.63",
                  "data_read_per_join": "580M"
                },
                "used_columns": [
                  "id"
                ]
              }
            }
          }
        }
      },
      {
        "table": {
          "table_name": "t",
          "access_type": "eq_ref",
          "possible_keys": [
            "PRIMARY"
          ],
          "key": "PRIMARY",
          "used_key_parts": [
            "id"
          ],
          "key_length": "8",
          "ref": [
            "a.id"
          ],
          "rows_examined_per_scan": 1,
          "rows_produced_per_join": 810030,
          "filtered": "100.00",
          "cost_info": {
            "read_cost": "299346.33",
            "eval_cost": "81003.00",
            "prefix_cost": "471480.20",
            "data_read_per_join": "488M"
          },
          "used_columns": [
            "id",
            "name",
            "value",
            "version",
            "created_at",
            "created_by",
            "is_deleted",
            "modified_at",
            "modified_by"
          ]
        }
      }
    ]
  }
}
```
EXPLAIN ANALYZE:
```sql
explain ANALYZE select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;
```
Result:
```
-> Limit: 200 row(s) (cost=397678.84 rows=30) (actual time=187.429..187.622 rows=30 loops=1)
    -> Nested loop inner join (cost=397678.84 rows=30) (actual time=187.428..187.621 rows=30 loops=1)
        -> Table scan on a (cost=98326.73..98329.51 rows=30) (actual time=187.410..187.413 rows=30 loops=1)
            -> Materialize (cost=98326.63..98326.63 rows=30) (actual time=187.409..187.409 rows=30 loops=1)
                -> Limit/Offset: 30/810000 row(s) (cost=98323.63 rows=30) (actual time=187.376..187.392 rows=30 loops=1)
                    -> Covering index scan on testdo using PRIMARY (cost=98323.63 rows=962512) (actual time=0.034..167.223 rows=810030 loops=1)
        -> Single-row index lookup on t using PRIMARY (id=a.id) (cost=0.37 rows=1) (actual time=0.007..0.007 rows=1 loops=30)
```
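As an aside, the derived table in SQL 2 has no `ORDER BY`, so the 30 ids it returns are strictly speaking not guaranteed; in this test they follow the PRIMARY key only because the optimizer happened to choose an index scan. An order-stable sketch of the same deferred-join pattern would be (the plan shape and timing should be comparable):
```sql
select t.*
from (select id from testdo order by id limit 810000,30) a
join testdo t on t.id = a.id
order by t.id;
```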
How to repeat:
1. Create the table:
   ```sql
   CREATE TABLE `testdo` (
     `id` bigint NOT NULL AUTO_INCREMENT,
     `name` varchar(50) DEFAULT NULL,
     `value` int NOT NULL,
     `version` int NOT NULL,
     `created_at` datetime DEFAULT NULL,
     `created_by` varchar(50) DEFAULT NULL,
     `is_deleted` bit(1) DEFAULT NULL,
     `modified_at` datetime DEFAULT NULL,
     `modified_by` varchar(50) DEFAULT NULL,
     PRIMARY KEY (`id`)
   ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
   ```
2. Mock 1,000,000 rows of random data (a sketch follows this list).
3. `select * from testdo order by id limit 810000,30;`
4. `explain format=json select * from testdo order by id limit 810000,30;`
5. `explain ANALYZE select * from testdo order by id limit 810000,30;`
6. `select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;`
7. `explain format=json select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;`
8. `explain ANALYZE select t.* from (select id from testdo limit 810000,30)a,testdo t where a.id = t.id;`
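For step 2, any method of generating rows works. A minimal sketch using a recursive CTE (MySQL 8.0+; the column values are arbitrary placeholders):
```sql
-- Let the recursive CTE iterate 1,000,000 times
-- (the default cte_max_recursion_depth is 1000).
SET SESSION cte_max_recursion_depth = 1000000;

INSERT INTO testdo (name, value, version, created_at, created_by,
                    is_deleted, modified_at, modified_by)
WITH RECURSIVE seq (n) AS (
  SELECT 1
  UNION ALL
  SELECT n + 1 FROM seq WHERE n < 1000000
)
SELECT CONCAT('name-', n),    -- arbitrary name
       FLOOR(RAND() * 10000), -- random value
       1,                     -- version
       NOW(), 'mock',         -- created_at / created_by
       b'0',                  -- is_deleted
       NOW(), 'mock'          -- modified_at / modified_by
FROM seq;
```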
Suggested fix:
I don't know where the miscalculation comes from, but `EXPLAIN` cost estimates should take `LIMIT` into account.