From 074a5c14d5b7cc15e16ca67e7e49a780a0d5ffb6 Mon Sep 17 00:00:00 2001
From: bsrikanth-mariadb
Date: Fri, 20 Feb 2026 15:52:14 +0530
Subject: [PATCH] MDEV-38805: Store optimizer_context into an IS table

Currently, the optimizer context is written as a JSON sub-element in the
Optimizer Trace. In this task, we separate it out from the optimizer trace
and instead store it in the optimizer_context Information Schema table.

The structure of the context is changed to look as shown below:
----------------------------------
CREATE TABLE t1 ( ... );
-- in case it is a constant table
REPLACE INTO t1 VALUES (...);
CREATE TABLE t2 ( ... );
...
set @context='{ JSON with all the captured calls }';
set @optimizer_context='context';
...
the original query;
----------------------------------

The IS table can be used to read the currently stored context, as well as
to dump it to an SQL file which can later be replayed in a different
environment. This is done as shown below:
--------------------------------------
set optimizer_record_context=ON;
set optimizer_trace=1;
-- sample query
select context into outfile '/tmp/captured-context.sql'
from information_schema.OPTIMIZER_CONTEXT;
---------------------------------------

All the existing tests are modified to query the OPTIMIZER_CONTEXT IS table.
---
 .../include/get_names_ddls_from_opt_ctx.inc   |  15 +
 mysql-test/include/get_opt_context.inc        |   7 +
 .../get_rec_idx_ranges_from_opt_ctx.inc       |  23 +
 .../run_query_twice_and_compare_stats.inc     |  10 +-
 mysql-test/main/information_schema-big.result |   2 +
 .../information_schema-big_embedded.result    |   2 +
 mysql-test/main/information_schema.result     |   3 +
 .../information_schema_all_engines.result     |   7 +-
 mysql-test/main/opt_trace_load_stats.result   | 214 +++---
 mysql-test/main/opt_trace_load_stats.test     | 108 +--
 .../main/opt_trace_load_stats_innodb.result   | 160 ++---
 mysql-test/main/opt_trace_store_ddls.result   | 633 +++++++++++++-----
 mysql-test/main/opt_trace_store_ddls.test     | 159 ++---
 mysql-test/main/opt_trace_store_stats.result  | 384 ++++++++---
 mysql-test/main/opt_trace_store_stats.test    | 102 +--
 .../suite/funcs_1/r/is_columns_is.result      |   4 +
 .../funcs_1/r/is_columns_is_embedded.result   |   4 +
 .../suite/funcs_1/r/is_tables_is.result       |  50 ++
 .../funcs_1/r/is_tables_is_embedded.result    |  50 ++
 sql/ha_partition.cc                           |   4 +-
 sql/handler.h                                 |   1 +
 sql/opt_store_replay_context.cc               | 380 ++++++-----
 sql/opt_store_replay_context.h                |  52 +-
 sql/sql_class.cc                              |   2 +
 sql/sql_class.h                               |   1 +
 sql/sql_parse.cc                              |   5 +-
 sql/sql_show.cc                               |   3 +
 27 files changed, 1452 insertions(+), 933 deletions(-)
 create mode 100644 mysql-test/include/get_names_ddls_from_opt_ctx.inc
 create mode 100644 mysql-test/include/get_opt_context.inc
 create mode 100644 mysql-test/include/get_rec_idx_ranges_from_opt_ctx.inc

diff --git a/mysql-test/include/get_names_ddls_from_opt_ctx.inc b/mysql-test/include/get_names_ddls_from_opt_ctx.inc
new file mode 100644
index 0000000000000..8fc294d282cf1
--- /dev/null
+++ b/mysql-test/include/get_names_ddls_from_opt_ctx.inc
@@ -0,0 +1,15 @@
+set @opt_context=
+  (select REGEXP_SUBSTR(
+    context,
+    '(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)')
+  from information_schema.optimizer_context);
+set @ddls= (select REGEXP_SUBSTR(
+  context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))')
+  AS ddl from information_schema.optimizer_context);
+set @db=
+  (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database')));
+select db_used from json_table(@db,
+  '$[*]' columns(db_used text path '$')) as jt;
+set @fn= (select
JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +select @ddls; \ No newline at end of file diff --git a/mysql-test/include/get_opt_context.inc b/mysql-test/include/get_opt_context.inc new file mode 100644 index 0000000000000..2615acdf67774 --- /dev/null +++ b/mysql-test/include/get_opt_context.inc @@ -0,0 +1,7 @@ +set @opt_context= + (select REGEXP_SUBSTR( + context, + '(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) + from information_schema.optimizer_context + ); \ No newline at end of file diff --git a/mysql-test/include/get_rec_idx_ranges_from_opt_ctx.inc b/mysql-test/include/get_rec_idx_ranges_from_opt_ctx.inc new file mode 100644 index 0000000000000..cf80901d8b010 --- /dev/null +++ b/mysql-test/include/get_rec_idx_ranges_from_opt_ctx.inc @@ -0,0 +1,23 @@ +set @opt_context= + (select REGEXP_SUBSTR( + context, + '(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') + from information_schema.optimizer_context); +set @records= + (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, + '$[*]' columns(num_of_records text path '$')) as jt; +set @indexes= + (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( + @indexes, '$[*][*]' columns(index_name text path '$.index_name', + rec_per_key json path '$.rec_per_key')) as jt; +set @list_ranges= + (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( + @list_ranges, + '$[*][*]' columns(index_name text path '$.index_name', + ranges json path '$.ranges', + num_rows int path '$.num_rows', + max_index_blocks int path '$.max_index_blocks', + max_row_blocks int path '$.max_row_blocks')) as jt; diff --git a/mysql-test/include/run_query_twice_and_compare_stats.inc b/mysql-test/include/run_query_twice_and_compare_stats.inc index 42d8cd4860c2d..431f075bce89b 100644 --- a/mysql-test/include/run_query_twice_and_compare_stats.inc +++ b/mysql-test/include/run_query_twice_and_compare_stats.inc @@ -18,13 +18,11 @@ set optimizer_replay_context=""; echo $explain_query; let $explain_output=`$explain_query`; -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= - (select json_pretty(json_extract( - json_extract(@trace, "$**.optimizer_context"), - '$[0]' - ) - )); + (select REGEXP_SUBSTR( + context, + '(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') + from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; #TO-DO: enable it after MDEV-38033 is resolved diff --git a/mysql-test/main/information_schema-big.result b/mysql-test/main/information_schema-big.result index 4688a2d1f640d..1989e04752512 100644 --- a/mysql-test/main/information_schema-big.result +++ b/mysql-test/main/information_schema-big.result @@ -38,6 +38,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE PARAMETERS SPECIFIC_SCHEMA PARTITIONS TABLE_SCHEMA @@ -105,6 +106,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE PARAMETERS SPECIFIC_SCHEMA PARTITIONS TABLE_SCHEMA diff --git a/mysql-test/main/information_schema-big_embedded.result b/mysql-test/main/information_schema-big_embedded.result index 4b3ad78f997d9..9deca6caaec1c 100644 --- 
a/mysql-test/main/information_schema-big_embedded.result +++ b/mysql-test/main/information_schema-big_embedded.result @@ -38,6 +38,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE PARAMETERS SPECIFIC_SCHEMA PARTITIONS TABLE_SCHEMA @@ -104,6 +105,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE PARAMETERS SPECIFIC_SCHEMA PARTITIONS TABLE_SCHEMA diff --git a/mysql-test/main/information_schema.result b/mysql-test/main/information_schema.result index 4cf8294f5a214..02cc7859c6b05 100644 --- a/mysql-test/main/information_schema.result +++ b/mysql-test/main/information_schema.result @@ -70,6 +70,7 @@ KEYWORDS KEY_CACHES KEY_COLUMN_USAGE KEY_PERIOD_USAGE +OPTIMIZER_CONTEXT OPTIMIZER_COSTS OPTIMIZER_TRACE PARAMETERS @@ -897,6 +898,8 @@ information_schema COLUMNS COLUMN_DEFAULT information_schema COLUMNS COLUMN_TYPE information_schema COLUMNS GENERATION_EXPRESSION information_schema EVENTS EVENT_DEFINITION +information_schema OPTIMIZER_CONTEXT QUERY +information_schema OPTIMIZER_CONTEXT CONTEXT information_schema OPTIMIZER_TRACE QUERY information_schema OPTIMIZER_TRACE TRACE information_schema PARAMETERS DTD_IDENTIFIER diff --git a/mysql-test/main/information_schema_all_engines.result b/mysql-test/main/information_schema_all_engines.result index d2063d6090a45..2d3c774c4cdca 100644 --- a/mysql-test/main/information_schema_all_engines.result +++ b/mysql-test/main/information_schema_all_engines.result @@ -43,6 +43,7 @@ KEYWORDS KEY_CACHES KEY_COLUMN_USAGE KEY_PERIOD_USAGE +OPTIMIZER_CONTEXT OPTIMIZER_COSTS OPTIMIZER_TRACE PARAMETERS @@ -131,6 +132,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE OPTIMIZER_TRACE QUERY PARAMETERS SPECIFIC_SCHEMA @@ -219,6 +221,7 @@ KEYWORDS WORD KEY_CACHES KEY_CACHE_NAME KEY_COLUMN_USAGE CONSTRAINT_SCHEMA KEY_PERIOD_USAGE CONSTRAINT_SCHEMA +OPTIMIZER_CONTEXT QUERY OPTIMIZER_COSTS ENGINE OPTIMIZER_TRACE QUERY PARAMETERS SPECIFIC_SCHEMA @@ -384,6 +387,7 @@ Database: information_schema | KEY_CACHES | | KEY_COLUMN_USAGE | | KEY_PERIOD_USAGE | +| OPTIMIZER_CONTEXT | | OPTIMIZER_COSTS | | OPTIMIZER_TRACE | | PARAMETERS | @@ -462,6 +466,7 @@ Database: INFORMATION_SCHEMA | KEY_CACHES | | KEY_COLUMN_USAGE | | KEY_PERIOD_USAGE | +| OPTIMIZER_CONTEXT | | OPTIMIZER_COSTS | | OPTIMIZER_TRACE | | PARAMETERS | @@ -501,5 +506,5 @@ Wildcard: inf_rmation_schema | information_schema | SELECT table_schema, count(*) FROM information_schema.TABLES WHERE table_schema IN ('mysql', 'INFORMATION_SCHEMA', 'test', 'mysqltest') GROUP BY TABLE_SCHEMA; table_schema count(*) -information_schema 73 +information_schema 74 mysql 31 diff --git a/mysql-test/main/opt_trace_load_stats.result b/mysql-test/main/opt_trace_load_stats.result index af8e3914fb8cb..0fec61d8cf171 100644 --- a/mysql-test/main/opt_trace_load_stats.result +++ b/mysql-test/main/opt_trace_load_stats.result @@ -28,17 +28,21 @@ a b 0 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +(select REGEXP_SUBSTR( +context, 
+'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 20 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t1_idx_a ["4"] t1_idx_b ["3"] @@ -46,17 +50,15 @@ t1_idx_ab [ "4", "1" ] -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); -select * from json_table( +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( @list_ranges, -'$[*][*]' columns( -index_name text path '$.index_name', +'$[*][*]' columns(index_name text path '$.index_name', ranges json path '$.ranges', num_rows int path '$.num_rows', max_index_blocks int path '$.max_index_blocks', -max_row_blocks int path '$.max_row_blocks' - ) -) as jt; +max_row_blocks int path '$.max_row_blocks')) as jt; index_name ranges num_rows max_index_blocks max_row_blocks t1_idx_a ["(NULL) < (a) < (3)"] 12 1 1 t1_idx_b ["(6) < (b)"] 2 1 1 @@ -76,11 +78,12 @@ a b 0 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 1 @@ -95,11 +98,12 @@ a b 0 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 1 @@ -117,11 +121,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 0 @@ -170,11 +175,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 1 @@ -190,11 +196,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( 
+context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select @opt_context; @opt_context NULL @@ -210,11 +217,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select @opt_context; @opt_context NULL @@ -232,11 +240,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 1 @@ -253,11 +262,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 0 @@ -290,11 +300,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); set @records= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); @@ -329,11 +340,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_1, @opt_context); JSON_EQUALS(@saved_opt_context_1, @opt_context) 1 @@ -350,11 +362,12 @@ a b 2 7 2 7 set @opt_context= -(select json_pretty(json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - )) -from information_schema.optimizer_trace); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); select JSON_EQUALS(@saved_opt_context_2, @opt_context); JSON_EQUALS(@saved_opt_context_2, @opt_context) 1 @@ -367,62 +380,63 @@ set @opt_context=json_remove(@saved_opt_context_1, '$.current_database'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "current_database" element not present at offset 1669. +Warning 4253 Failed to parse saved optimizer context: "current_database" element not present at offset 1404. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].name'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "name" element not present at offset 1676. 
+Warning 4253 Failed to parse saved optimizer context: "name" element not present at offset 1411. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].ddl'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "ddl" element not present at offset 1456. +Warning 4254 Failed to match the stats from replay context with the optimizer stats: the given list of ranges i.e. [(10) < (a), ] doesn't exist in the list of ranges for table_name db1.t1 and index_name t1_idx_a +Warning 4254 Failed to match the stats from replay context with the optimizer stats: the given list of ranges i.e. [(10) < (a), ] doesn't exist in the list of ranges for table_name db1.t1 and index_name t1_idx_ab set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].num_of_records'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "num_of_records" element not present at offset 1672. +Warning 4253 Failed to parse saved optimizer context: "num_of_records" element not present at offset 1407. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].indexes[0].index_name'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "index_name" element not present at offset 433. +Warning 4253 Failed to parse saved optimizer context: "index_name" element not present at offset 168. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].indexes[0].rec_per_key'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "rec_per_key" element not present at offset 437. +Warning 4253 Failed to parse saved optimizer context: "rec_per_key" element not present at offset 172. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].index_name'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "index_name" element not present at offset 910. +Warning 4253 Failed to parse saved optimizer context: "index_name" element not present at offset 645. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].ranges'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "ranges" element not present at offset 902. +Warning 4253 Failed to parse saved optimizer context: "ranges" element not present at offset 637. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].num_rows'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "num_rows" element not present at offset 920. +Warning 4253 Failed to parse saved optimizer context: "num_rows" element not present at offset 655. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].cost'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "cost" element not present at offset 704. +Warning 4253 Failed to parse saved optimizer context: "cost" element not present at offset 439. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].max_index_blocks'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "max_index_blocks" element not present at offset 913. +Warning 4253 Failed to parse saved optimizer context: "max_index_blocks" element not present at offset 648. 
set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_ranges[0].max_row_blocks'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "max_row_blocks" element not present at offset 915. +Warning 4253 Failed to parse saved optimizer context: "max_row_blocks" element not present at offset 650. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].indexes[0]'); select * from t1 where a > 10; a b @@ -450,62 +464,64 @@ explain select tt1.a, tt2.b from t1 tt1, t1 tt2 where tt1.a = tt2.a; id select_type table type possible_keys key key_len ref rows Extra 1 SIMPLE tt1 index t1_idx_a,t1_idx_ab t1_idx_a 5 NULL 40 Using where; Using index 1 SIMPLE tt2 ref t1_idx_a,t1_idx_ab t1_idx_ab 5 db1.tt1.a 8 Using index -set @saved_opt_context_1= -(select json_extract( -json_extract(trace, "$**.optimizer_context"), -'$[0]' - ) -from information_schema.optimizer_trace); +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)' + ) +from information_schema.optimizer_context +); +set @saved_opt_context_1= @opt_context; set optimizer_replay_context=@opt_context_var_name; set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].key_number'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "key_number" element not present at offset 802. +Warning 4253 Failed to parse saved optimizer context: "key_number" element not present at offset 537. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].num_records'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "num_records" element not present at offset 801. +Warning 4253 Failed to parse saved optimizer context: "num_records" element not present at offset 536. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].eq_ref'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "eq_ref" element not present at offset 806. +Warning 4253 Failed to parse saved optimizer context: "eq_ref" element not present at offset 541. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].index_cost_io'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "index_cost_io" element not present at offset 799. +Warning 4253 Failed to parse saved optimizer context: "index_cost_io" element not present at offset 534. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].index_cost_cpu'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "index_cost_cpu" element not present at offset 788. +Warning 4253 Failed to parse saved optimizer context: "index_cost_cpu" element not present at offset 523. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].row_cost_io'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "row_cost_io" element not present at offset 801. +Warning 4253 Failed to parse saved optimizer context: "row_cost_io" element not present at offset 536. 
set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].row_cost_cpu'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "row_cost_cpu" element not present at offset 790. +Warning 4253 Failed to parse saved optimizer context: "row_cost_cpu" element not present at offset 525. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].max_index_blocks'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "max_index_blocks" element not present at offset 796. +Warning 4253 Failed to parse saved optimizer context: "max_index_blocks" element not present at offset 531. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].max_row_blocks'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "max_row_blocks" element not present at offset 798. +Warning 4253 Failed to parse saved optimizer context: "max_row_blocks" element not present at offset 533. set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].copy_cost'); select * from t1 where a > 10; a b Warnings: -Warning 4253 Failed to parse saved optimizer context: "copy_cost" element not present at offset 803. +Warning 4253 Failed to parse saved optimizer context: "copy_cost" element not present at offset 538. drop table t1; drop database db1; diff --git a/mysql-test/main/opt_trace_load_stats.test b/mysql-test/main/opt_trace_load_stats.test index 8dbbfeb62f006..2b4c6a83b97d7 100644 --- a/mysql-test/main/opt_trace_load_stats.test +++ b/mysql-test/main/opt_trace_load_stats.test @@ -29,28 +29,7 @@ analyze table t1 persistent for all; --echo # select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); - -set @records= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); -select * from json_table( - @list_ranges, - '$[*][*]' columns( - index_name text path '$.index_name', - ranges json path '$.ranges', - num_rows int path '$.num_rows', - max_index_blocks int path '$.max_index_blocks', - max_row_blocks int path '$.max_row_blocks' - ) -) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc set @saved_opt_context_1=@opt_context; set @saved_records_1=@records; @@ -66,12 +45,7 @@ set optimizer_replay_context=@saved_opt_context_var_name_1; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -84,12 +58,7 @@ set optimizer_replay_context=""; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source 
include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -104,12 +73,7 @@ insert into t1 select seq%5, seq%8 from seq_1_to_20; --echo # select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -145,12 +109,7 @@ set optimizer_replay_context=@saved_opt_context_var_name_1; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -162,12 +121,7 @@ set optimizer_record_context=OFF; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select @opt_context; @@ -179,12 +133,7 @@ set optimizer_trace=0; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select @opt_context; @@ -198,12 +147,7 @@ set optimizer_record_context=ON; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -216,12 +160,7 @@ set optimizer_replay_context=""; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -244,12 +183,7 @@ analyze table t1 persistent for all; --echo # select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc set @records= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); @@ -271,12 +205,7 @@ set optimizer_replay_context=@saved_opt_context_var_name_1; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_1, @opt_context); @@ -289,12 +218,7 @@ set optimizer_replay_context=""; select * from t1 where a < 3 and b > 6; -set @opt_context= - (select json_pretty(json_extract( - json_extract(trace, "$**.optimizer_context"), - '$[0]' - )) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc select JSON_EQUALS(@saved_opt_context_2, @opt_context); @@ -353,12 +277,8 @@ set optimizer_replay_context=""; explain select tt1.a, tt2.b from t1 tt1, t1 tt2 where tt1.a = tt2.a; -set @saved_opt_context_1= - (select json_extract( - json_extract(trace, 
"$**.optimizer_context"), - '$[0]' - ) - from information_schema.optimizer_trace); +--source include/get_opt_context.inc +set @saved_opt_context_1= @opt_context; set optimizer_replay_context=@opt_context_var_name; set @opt_context=json_remove(@saved_opt_context_1, '$.list_contexts[0].list_index_read_costs[0].key_number'); diff --git a/mysql-test/main/opt_trace_load_stats_innodb.result b/mysql-test/main/opt_trace_load_stats_innodb.result index 3fd1b0594e446..347cbf2405a66 100644 --- a/mysql-test/main/opt_trace_load_stats_innodb.result +++ b/mysql-test/main/opt_trace_load_stats_innodb.result @@ -43,13 +43,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 a, t1 b where a.c1 < 3 and b.c1 < 333 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -138,13 +136,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 a, t1 b where a.c1 < 3 and b.c1 < 333 and a.c2 < 3 and b.c2 < 333 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -276,13 +272,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -360,13 +354,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -443,13 +435,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 and 
tt1.c2 = tt2.c2 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -542,13 +532,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 and tt1.c2 = tt2.c2 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -631,13 +619,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -724,13 +710,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c1 = tt2.c1 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -807,13 +791,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1, t1 as tt2 where tt1.c2 = tt2.c2 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -887,13 +869,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c1 = 5 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select 
json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -957,13 +937,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c1 = 5 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -1027,13 +1005,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c1 = 5 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -1097,13 +1073,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c2 = 5 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -1160,13 +1134,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c2 = 5 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -1225,13 +1197,11 @@ Table Op Msg_type Msg_text db1.t1 analyze status OK set optimizer_replay_context=""; explain format=json select * from t1 as tt1 where tt1.c1 = 5 OR tt1.c2 = 10 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set 
@opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); @@ -1315,13 +1285,11 @@ index idx_ac(a, c) insert into t1 select seq%2, seq%3, seq%5 from seq_1_to_20; set optimizer_replay_context=""; explain format=json select * from t1 where a=1 and b=1 and c=1 -set @trace= (select trace from information_schema.optimizer_trace); set @saved_opt_context= -(select json_pretty(json_extract( -json_extract(@trace, "$**.optimizer_context"), -'$[0]' - ) -)); +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); set @saved_opt_context_var_name='saved_opt_context'; set @explain_output='$explain_output'; set @explain_output= (select json_pretty(round_cost(@explain_output))); diff --git a/mysql-test/main/opt_trace_store_ddls.result b/mysql-test/main/opt_trace_store_ddls.result index 3c2c1dc78fd5d..782e4515bdec4 100644 --- a/mysql-test/main/opt_trace_store_ddls.result +++ b/mysql-test/main/opt_trace_store_ddls.result @@ -1,23 +1,17 @@ set optimizer_record_context=ON; -# show variables like 'optimizer_record_context'; Variable_name Value optimizer_record_context ON -# set optimizer_record_context=OFF; -# show variables like 'optimizer_record_context'; Variable_name Value optimizer_record_context OFF -# create database db1; use db1; create table t1 (a int, b int); insert into t1 values (1,2),(2,3); -# create table t2 (a int); insert into t2 values (1),(2); -# create view view1 as (select t1.a as a, t1.b as b, t2.a as c from (t1 join t2) where t1.a = t2.a); # # disable both optimizer_trace and optimizer_record_context @@ -25,67 +19,114 @@ create view view1 as (select t1.a as a, t1.b as b, t2.a as c from (t1 join t2) w # set optimizer_trace=0; set optimizer_record_context=OFF; -# select * from t1 where t1.a = 3; a b -# -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +select @ddls; +@ddls +NULL # # disable optimizer_trace, but enable optimizer_record_context # there should be no trace here as well # set optimizer_record_context=ON; -# select * from t1 where t1.a = 3; a b -# -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from 
information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +select @ddls; +@ddls +NULL # # enable optimizer_trace, but disable optimizer_record_context # trace result should be empty # set optimizer_trace=1; set optimizer_record_context=OFF; -# select * from t1 where t1.a = 3; a b -# -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +select @ddls; +@ddls +NULL # # enable both optimizer_trace and optimizer_record_context # trace result should have 1 ddl statement for table t1 # set optimizer_trace=1; set optimizer_record_context=ON; -# select * from t1 where t1.a = 3; a b -# -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # enable both optimizer_trace and optimizer_record_context # test for view @@ -94,19 +135,39 @@ CREATE TABLE `t1` ( set optimizer_record_context=ON; select * from view1 where view1.a = 3; a b c -# -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from 
information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +db1.t1 +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci -CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` SQL SECURITY DEFINER VIEW db1.view1 AS (select `db1`.`t1`.`a` AS `a`,`db1`.`t1`.`b` AS `b`,`db1`.`t2`.`a` AS `c` from (`db1`.`t1` join `db1`.`t2`) where `db1`.`t1`.`a` = `db1`.`t2`.`a`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + +CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` SQL SECURITY DEFINER VIEW db1.view1 AS (select `db1`.`t1`.`a` AS `a`,`db1`.`t1`.`b` AS `b`,`db1`.`t2`.`a` AS `c` from (`db1`.`t1` join `db1`.`t2`) where `db1`.`t1`.`a` = `db1`.`t2`.`a`); + + # # enable both optimizer_trace and optimizer_record_context # test for temp table @@ -114,17 +175,36 @@ CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` SQL SECURITY DEFINER VIEW # create temporary table temp1(col1 int); insert into temp1 select * from t2; -# -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +db1.temp1 +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TEMPORARY TABLE `temp1` ( `col1` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # there should be no duplicate ddls # there should be only 1 ddl for table t2 @@ -133,82 +213,192 @@ select * from t2 union select * from t2 union select * from t2; a 1 2 -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= 
(select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # there should be no duplicate ddls # there should be only 3 ddls for tables t1, t2, and view1 # select * from view1 where view1.a = 3 union select * from view1 where view1.a = 3; a b c -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +db1.t1 +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci -CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` SQL SECURITY DEFINER VIEW db1.view1 AS (select `db1`.`t1`.`a` AS `a`,`db1`.`t1`.`b` AS `b`,`db1`.`t2`.`a` AS `c` from (`db1`.`t1` join `db1`.`t2`) where `db1`.`t1`.`a` = `db1`.`t2`.`a`) +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + +CREATE ALGORITHM=UNDEFINED DEFINER=`root`@`localhost` SQL SECURITY DEFINER VIEW db1.view1 AS (select `db1`.`t1`.`a` AS `a`,`db1`.`t1`.`b` AS `b`,`db1`.`t2`.`a` AS `c` from (`db1`.`t1` join `db1`.`t2`) where `db1`.`t1`.`a` = `db1`.`t2`.`a`); + + # # test for insert # there should be no trace for insert with values # insert into t1 values ((select max(t2.a) from t2), (select min(t2.a) from t2)); -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +set @fn= (select 
JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +select @ddls; +@ddls +NULL # # test for delete # trace result should have 1 ddl statement for table t1 # delete from t1 where t1.a=3; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t1 +select @ddls; +@ddls CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # test for update # trace result should have 1 ddl statement for table t1 # update t1 set t1.b = t1.a; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t1 +select @ddls; +@ddls CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # test for insert as select # trace result should have 2 ddl statements for tables t1, t2 # insert into t1 (select t2.a as a, t2.a as b from t2); -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; 
+name +db1.t2 +db1.t1 +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + create database db2; use db2; create table t1(a int); @@ -223,26 +413,37 @@ select db1_t1.b FROM t1 AS db1_t1, db2.t1 AS db2_t1 WHERE db1_t1.a = db2_t1.a AND db1_t1.a >= 3; b -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db2.t1 db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `db2`.`t1` ( `a` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # use database db2 # test to select 2 tables with same name but from 2 databases @@ -253,26 +454,37 @@ select db1_t1.b FROM db1.t1 AS db1_t1, db2.t1 AS db2_t1 WHERE db1_t1.a = db2_t1.a AND db1_t1.a >= 3; b -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db2 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db2.t1 db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `t1` ( `a` int(11) 
DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `db1`.`t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # # use database db2 # test to select from 2 tables from 2 different databases, @@ -283,22 +495,32 @@ select t1.b FROM db1.t1 AS t1, mysql.db AS t2 WHERE t1.a >= 3; b -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db2 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `db1`.`t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + use db1; drop table db2.t1; drop database db2; @@ -315,28 +537,41 @@ explain select * from t1, t2 where t2.a=1 and t1.b=t2.b; id select_type table type possible_keys key key_len ref rows Extra 1 SIMPLE t2 const PRIMARY PRIMARY 4 const 1 1 SIMPLE t1 ALL NULL NULL NULL NULL 15 Using where -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db1.t2 db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `t2` ( `a` int(11) NOT NULL, `b` int(11) DEFAULT NULL, PRIMARY KEY (`a`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + 
+REPLACE INTO db1.t2(a, b) VALUES (1, 1); + CREATE TABLE `t1` ( `a` int(11) DEFAULT NULL, `b` int(11) DEFAULT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + drop table t1; drop table t2; # @@ -350,16 +585,25 @@ create table t11 (a int primary key, b varchar(10)); insert into t11 values (1, 'one'),(2, 'two'); select t10.b, t11.a from t10, t11 where t10.a = t11.c + 10; ERROR 42S22: Unknown column 't11.c' in 'WHERE' -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls +NULL drop table t10; drop table t11; # @@ -382,18 +626,26 @@ explain select * from t1 partition (p1) where a=10; id select_type table type possible_keys key key_len ref rows Extra 1 SIMPLE t1 ref a a 5 const 49 -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `t1` ( `pk` int(11) NOT NULL, `a` int(11) DEFAULT NULL, @@ -402,7 +654,9 @@ CREATE TABLE `t1` ( ) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci PARTITION BY RANGE (`pk`) (PARTITION `p0` VALUES LESS THAN (10) ENGINE = MyISAM, - PARTITION `p1` VALUES LESS THAN MAXVALUE ENGINE = MyISAM) + PARTITION `p1` VALUES LESS THAN MAXVALUE ENGINE = MyISAM); + + drop table t1; # # test with insert delayed @@ -420,17 +674,25 @@ insert into t1 (a,b) values (10,20); insert into t1 (a,b,c) values (100,200,400); truncate table t1; insert delayed into t1 
values (); -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used -db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls +NULL drop table t1; # # test primary, and foreign key tables @@ -452,29 +714,40 @@ from t1,t2 where t1.id = t2.id; name address abc address1 xyz address2 -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; db_used db1 -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; name db1.t2 db1.t1 -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +select @ddls; +@ddls CREATE TABLE `t2` ( `id` int(11) DEFAULT NULL, `address` varchar(10) DEFAULT NULL, KEY `fk_id` (`id`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(10) DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + drop table t1; drop table t2; # @@ -486,31 +759,71 @@ create table t2(id2 int not null); insert into t1 values (1),(2); insert into t2 values (1),(1),(2),(2); delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select 
REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +db1.t1 +select @ddls; +@ddls CREATE TABLE `t2` ( `id2` int(11) NOT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `id1` int(11) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id1`) -) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + # rerun the same delete query # Now, trace result should have the ddls for all 2 tables, # even though no data is deleted delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; -ddl +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @ddls= (select REGEXP_SUBSTR( +context, '(CREATE.*)([\n\r].*)*(?=(set @opt|REPLACE INTO))') +AS ddl from information_schema.optimizer_context); +set @db= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.current_database'))); +select db_used from json_table(@db, +'$[*]' columns(db_used text path '$')) as jt; +db_used +db1 +set @fn= (select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.name'))); +select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; +name +db1.t2 +db1.t1 +select @ddls; +@ddls CREATE TABLE `t2` ( `id2` int(11) NOT NULL -) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + CREATE TABLE `t1` ( `id1` int(11) NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id1`) -) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci +) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci; + + drop table t1, t2; drop database db1; diff --git a/mysql-test/main/opt_trace_store_ddls.test b/mysql-test/main/opt_trace_store_ddls.test index fea1eeecb30fc..8bc0dd937cd0e 100644 --- a/mysql-test/main/opt_trace_store_ddls.test +++ b/mysql-test/main/opt_trace_store_ddls.test @@ -4,25 +4,19 @@ --source include/no_view_protocol.inc set optimizer_record_context=ON; ---echo # show variables like 'optimizer_record_context'; ---echo # set optimizer_record_context=OFF; ---echo # show variables like 'optimizer_record_context'; ---echo # create database db1; use db1; create table t1 (a int, b int); insert into t1 values (1,2),(2,3); ---echo # create table t2 (a int); insert into t2 values (1),(2); ---echo # create view view1 as (select t1.a as a, t1.b as b, t2.a as c from (t1 join t2) where t1.a = t2.a); --echo # @@ -31,12 +25,9 @@ create view view1 as (select t1.a as a, t1.b as b, t2.a as c from (t1 join t2) w --echo # set optimizer_trace=0; set optimizer_record_context=OFF; ---echo # + select * from t1 where t1.a = 3; ---echo # -set @ddls= (select json_detailed(json_extract(trace, 
'$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # disable optimizer_trace, but enable optimizer_record_context @@ -44,12 +35,10 @@ from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; --echo # set optimizer_record_context=ON; ---echo # + select * from t1 where t1.a = 3; ---echo # -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # enable optimizer_trace, but disable optimizer_record_context @@ -57,12 +46,10 @@ from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; --echo # set optimizer_trace=1; set optimizer_record_context=OFF; ---echo # + select * from t1 where t1.a = 3; ---echo # -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # enable both optimizer_trace and optimizer_record_context @@ -70,16 +57,10 @@ from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; --echo # set optimizer_trace=1; set optimizer_record_context=ON; ---echo # + select * from t1 where t1.a = 3; ---echo # -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # enable both optimizer_trace and optimizer_record_context @@ -87,12 +68,10 @@ select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; --echo # trace result should have 3 ddl statements --echo # set optimizer_record_context=ON; + select * from view1 where view1.a = 3; ---echo # -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # enable both optimizer_trace and optimizer_record_context @@ -102,65 +81,55 @@ from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; create temporary table temp1(col1 int); insert into temp1 select * from t2; ---echo # - -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # there should be no duplicate ddls --echo # there should be only 1 ddl for table t2 --echo # select * from t2 union select * from t2 union select * from t2; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # there should be no duplicate 
ddls --echo # there should be only 3 ddls for tables t1, t2, and view1 --echo # select * from view1 where view1.a = 3 union select * from view1 where view1.a = 3; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # test for insert --echo # there should be no trace for insert with values --echo # insert into t1 values ((select max(t2.a) from t2), (select min(t2.a) from t2)); -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # test for delete --echo # trace result should have 1 ddl statement for table t1 --echo # delete from t1 where t1.a=3; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # test for update --echo # trace result should have 1 ddl statement for table t1 --echo # update t1 set t1.b = t1.a; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # test for insert as select --echo # trace result should have 2 ddl statements for tables t1, t2 --echo # insert into t1 (select t2.a as a, t2.a as b from t2); -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc create database db2; use db2; @@ -177,13 +146,8 @@ select db1_t1.b FROM t1 AS db1_t1, db2.t1 AS db2_t1 WHERE db1_t1.a = db2_t1.a AND db1_t1.a >= 3; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # use database db2 @@ -196,13 +160,8 @@ select db1_t1.b FROM db1.t1 AS db1_t1, db2.t1 AS db2_t1 WHERE db1_t1.a = db2_t1.a AND db1_t1.a >= 3; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; + +--source include/get_names_ddls_from_opt_ctx.inc --echo # --echo # use database db2 @@ -214,13 +173,7 @@ select t1.b FROM db1.t1 AS t1, mysql.db AS t2 WHERE t1.a >= 3; -set 
@trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc use db1; drop table db2.t1; @@ -238,13 +191,7 @@ insert into t2 select seq, seq from seq_1_to_10; explain select * from t1, t2 where t2.a=1 and t1.b=t2.b; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc drop table t1; drop table t2; @@ -262,13 +209,7 @@ insert into t11 values (1, 'one'),(2, 'two'); --error ER_BAD_FIELD_ERROR select t10.b, t11.a from t10, t11 where t10.a = t11.c + 10; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc drop table t10; drop table t11; @@ -293,13 +234,8 @@ flush tables; explain select * from t1 partition (p1) where a=10; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc + drop table t1; --echo # @@ -321,13 +257,8 @@ insert into t1 (a,b,c) values (100,200,400); truncate table t1; insert delayed into t1 values (); -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc + drop table t1; --echo # @@ -352,13 +283,7 @@ insert into t2 values (1, 'address1'), (2, 'address2'); 
select t1.name, t2.address from t1,t2 where t1.id = t2.id; -set @trace= (select trace from information_schema.optimizer_trace); -set @db=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.current_database'))); -select db_used from json_table(@db, '$[*]' columns(db_used text path '$')) as jt; -set @fn=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.name'))); -select name from json_table(@fn, '$[*]' columns(name text path '$')) as jt; -set @ddls=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.ddl'))); -select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc drop table t1; drop table t2; @@ -376,18 +301,14 @@ insert into t2 values (1),(1),(2),(2); delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc --echo # rerun the same delete query --echo # Now, trace result should have the ddls for all 2 tables, --echo # even though no data is deleted delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2; -set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace); -select ddl -from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt; +--source include/get_names_ddls_from_opt_ctx.inc drop table t1, t2; diff --git a/mysql-test/main/opt_trace_store_stats.result b/mysql-test/main/opt_trace_store_stats.result index c5d63992d07eb..4a4e8282f437a 100644 --- a/mysql-test/main/opt_trace_store_stats.result +++ b/mysql-test/main/opt_trace_store_stats.result @@ -35,13 +35,22 @@ db1.t2 analyze status Table is already up to date select count(*) from t1; count(*) 20 -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 0 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t1_idx_a ["10"] t1_idx_b ["7"] @@ -49,20 +58,39 @@ t1_idx_ab [ "10", "3" ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks # # simple query using join of two tables # select count(*) from t1, t2 where t1.a = t2.a; count(*) 100 -set @trace= (select trace from information_schema.optimizer_trace); -set @records= 
(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 30 20 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t2_idx_a ["5"] t1_idx_a ["10"] @@ -71,6 +99,16 @@ t1_idx_ab [ "10", "3" ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks # # negative test # simple query using join of two tables @@ -80,27 +118,55 @@ set optimizer_record_context=OFF; select count(*) from t1, t2 where t1.a = t2.a; count(*) 100 -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks set optimizer_record_context=ON; # # there should be no duplicate information # select * from view1 union select * from view1; a b c -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set 
@opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 30 20 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t2_idx_a ["5"] t1_idx_a ["10"] @@ -109,21 +175,42 @@ t1_idx_ab [ "10", "3" ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks +t2_idx_a ["(5) <= (a) <= (5)"] 5 1 1 +t2_idx_a ["(5) <= (a) <= (5)"] 5 1 1 +t1_idx_a ["(5) <= (a) <= (5)"] 1 1 1 +t1_idx_ab ["(5) <= (a) <= (5)"] 1 1 1 +t1_idx_a ["(5) <= (a) <= (5)"] 1 1 1 +t1_idx_ab ["(5) <= (a) <= (5)"] 1 1 1 # # test for update # update t1 set t1.b = t1.a; -analyze table t1 persistent for all; -Table Op Msg_type Msg_text -db1.t1 analyze status Engine-independent statistics collected -db1.t1 analyze status OK -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 20 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t1_idx_a ["10"] t1_idx_b ["7"] @@ -131,30 +218,59 @@ t1_idx_ab [ "10", "3" ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks # # test for insert as select # insert into t1 (select t2.a as a, t2.a as b from t2); -analyze table t1 persistent for all; -Table Op Msg_type Msg_text -db1.t1 analyze status Engine-independent statistics collected -db1.t1 analyze status OK -set @trace= (select trace from information_schema.optimizer_trace); -set 
@records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 30 20 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t2_idx_a ["5"] t1_idx_a ["10"] -t1_idx_b ["10"] +t1_idx_b ["7"] t1_idx_ab [ "10", - "10" + "3" ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( +@list_ranges, +'$[*][*]' columns(index_name text path '$.index_name', +ranges json path '$.ranges', +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks +analyze table t1 persistent for all; +Table Op Msg_type Msg_text +db1.t1 analyze status Engine-independent statistics collected +db1.t1 analyze status OK # # range analysis tests # @@ -164,45 +280,89 @@ t1_idx_ab [ analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; id select_type table type possible_keys key key_len ref rows r_rows filtered r_filtered Extra 1 SIMPLE t1 index t1_idx_a,t1_idx_b,t1_idx_ab t1_idx_ab 10 NULL 50 50.00 100.00 70.00 Using where; Using index -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; +num_of_records +50 +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; +index_name rec_per_key +t1_idx_a ["8"] +t1_idx_b ["8"] +t1_idx_ab [ + "8", + "8" + ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( @list_ranges, -'$[*][*]' columns( -index_name text path '$.index_name', +'$[*][*]' columns(index_name text path '$.index_name', ranges json path '$.ranges', -num_rows int path '$.num_rows' - ) -) as jt; -index_name ranges num_rows -t1_idx_a ["(1) <= (a) <= (5)"] 35 -t1_idx_ab ["(1) <= (a) <= (5)"] 35 -t1_idx_b ["(6) <= (b) <= (10)"] 1 +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks +t1_idx_a ["(1) <= (a) <= (5)"] 35 1 1 +t1_idx_ab 
["(1) <= (a) <= (5)"] 35 1 1 +t1_idx_b ["(6) <= (b) <= (10)"] 1 1 1 # # simple query with or condition on the same column # analyze select * from t1 where t1.a between 1 and 5 or t1.a between 6 and 10; id select_type table type possible_keys key key_len ref rows r_rows filtered r_filtered Extra 1 SIMPLE t1 range t1_idx_a,t1_idx_ab t1_idx_ab 5 NULL 36 35.00 100.00 100.00 Using where; Using index -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; +num_of_records +50 +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; +index_name rec_per_key +t1_idx_a ["8"] +t1_idx_b ["8"] +t1_idx_ab [ + "8", + "8" + ] +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( @list_ranges, -'$[*][*]' columns( -index_name text path '$.index_name', +'$[*][*]' columns(index_name text path '$.index_name', ranges json path '$.ranges', -num_rows int path '$.num_rows' - ) -) as jt; -index_name ranges num_rows +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks t1_idx_a [ "(1) <= (a) <= (5)", "(6) <= (a) <= (10)" - ] 36 + ] 36 2 1 t1_idx_ab [ "(1) <= (a) <= (5)", "(6) <= (a) <= (10)" - ] 36 + ] 36 2 1 # # negative test on the simple query with or condition on 2 columns # @@ -210,17 +370,32 @@ set optimizer_record_context=OFF; analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; id select_type table type possible_keys key key_len ref rows r_rows filtered r_filtered Extra 1 SIMPLE t1 index t1_idx_a,t1_idx_b,t1_idx_ab t1_idx_ab 10 NULL 50 50.00 100.00 70.00 Using where; Using index -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; +num_of_records +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; +index_name rec_per_key +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( @list_ranges, -'$[*][*]' columns( -index_name text path '$.index_name', +'$[*][*]' columns(index_name text path '$.index_name', ranges json path '$.ranges', -num_rows int path '$.num_rows' - ) -) as jt; -index_name ranges num_rows +num_rows int path '$.num_rows', +max_index_blocks int path 
'$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks set optimizer_record_context=ON; # # simple query with or condition on 2 columns @@ -229,13 +404,22 @@ set optimizer_record_context=ON; analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; id select_type table type possible_keys key key_len ref rows r_rows filtered r_filtered Extra 1 SIMPLE t1 index t1_idx_a,t1_idx_b,t1_idx_ab t1_idx_ab 10 NULL 50 50.00 100.00 70.00 Using where; Using index -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; +set @opt_context= +(select REGEXP_SUBSTR( +context, +'(?<=set @opt_context=\')([\n\r].*)*(?=\'\;--opt_context_ends)') +from information_schema.optimizer_context); +set @records= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.num_of_records'))); +select *from json_table(@records, +'$[*]' columns(num_of_records text path '$')) as jt; num_of_records 50 -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +set @indexes= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.indexes'))); +select *from json_table( +@indexes, '$[*][*]' columns(index_name text path '$.index_name', +rec_per_key json path '$.rec_per_key')) as jt; index_name rec_per_key t1_idx_a ["8"] t1_idx_b ["8"] @@ -243,19 +427,19 @@ t1_idx_ab [ "8", "8" ] -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( +set @list_ranges= +(select JSON_DETAILED(JSON_EXTRACT(@opt_context, '$**.list_ranges'))); +select *from json_table( @list_ranges, -'$[*][*]' columns( -index_name text path '$.index_name', +'$[*][*]' columns(index_name text path '$.index_name', ranges json path '$.ranges', -num_rows int path '$.num_rows' - ) -) as jt; -index_name ranges num_rows -t1_idx_a ["(1) <= (a) <= (5)"] 35 -t1_idx_ab ["(1) <= (a) <= (5)"] 35 -t1_idx_b ["(6) <= (b) <= (10)"] 1 +num_rows int path '$.num_rows', +max_index_blocks int path '$.max_index_blocks', +max_row_blocks int path '$.max_row_blocks')) as jt; +index_name ranges num_rows max_index_blocks max_row_blocks +t1_idx_a ["(1) <= (a) <= (5)"] 35 1 1 +t1_idx_ab ["(1) <= (a) <= (5)"] 35 1 1 +t1_idx_b ["(6) <= (b) <= (10)"] 1 1 1 drop view view1; drop table t1; drop table t2; @@ -279,19 +463,19 @@ id select_type table type possible_keys key key_len ref rows r_rows filtered r_f 1 PRIMARY t1 const PRIMARY PRIMARY 4 const 1 NULL 100.00 NULL 2 UNION t1 const PRIMARY PRIMARY 4 const 1 NULL 100.00 NULL NULL UNION RESULT ALL NULL NULL NULL NULL NULL 2.00 NULL NULL -set @trace= (select trace from information_schema.optimizer_trace); -set @const_table_inserts= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.const_table_inserts'))); +set @const_table_inserts= +(select REGEXP_SUBSTR( +context, +'(REPLACE INTO.*)([\n\r].*)*(?=set @opt_context)' + ) +from information_schema.optimizer_context +); select @const_table_inserts; @const_table_inserts -[ - [ - "REPLACE INTO db1.t1(a, b) VALUES (5, 0)", - "REPLACE INTO db1.t1(a, b) VALUES (4, 4)" - ] -] -select * from json_table(@const_table_inserts, '$[*][*]' columns(insert_stmt text path '$')) as jt; -insert_stmt -REPLACE INTO db1.t1(a, b) VALUES (5, 0) -REPLACE 
INTO db1.t1(a, b) VALUES (4, 4) +REPLACE INTO db1.t1(a, b) VALUES (5, 0); + +REPLACE INTO db1.t1(a, b) VALUES (4, 4); + + drop table t1; drop database db1; diff --git a/mysql-test/main/opt_trace_store_stats.test b/mysql-test/main/opt_trace_store_stats.test index 975b4132cb743..59e991919902d 100644 --- a/mysql-test/main/opt_trace_store_stats.test +++ b/mysql-test/main/opt_trace_store_stats.test @@ -28,6 +28,7 @@ create view view1 as ( ); --echo # analyze all the tables + set session use_stat_tables='COMPLEMENTARY'; analyze table t1 persistent for all; analyze table t2 persistent for all; @@ -37,22 +38,14 @@ analyze table t2 persistent for all; --echo # select count(*) from t1; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # simple query using join of two tables --echo # select count(*) from t1, t2 where t1.a = t2.a; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # negative test @@ -62,11 +55,7 @@ select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.ind set optimizer_record_context=OFF; select count(*) from t1, t2 where t1.a = t2.a; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc set optimizer_record_context=ON; @@ -74,35 +63,24 @@ set optimizer_record_context=ON; --echo # there should be no duplicate information --echo # select * from view1 union select * from view1; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; + +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # test for update --echo # update t1 set t1.b = t1.a; -analyze table t1 persistent for all; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' 
columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # test for insert as select --echo # insert into t1 (select t2.a as a, t2.a as b from t2); -analyze table t1 persistent for all; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc + +analyze table t1 persistent for all; --echo # --echo # range analysis tests @@ -113,32 +91,14 @@ select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.ind --echo # analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( - @list_ranges, - '$[*][*]' columns( - index_name text path '$.index_name', - ranges json path '$.ranges', - num_rows int path '$.num_rows' - ) -) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # simple query with or condition on the same column --echo # analyze select * from t1 where t1.a between 1 and 5 or t1.a between 6 and 10; -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( - @list_ranges, - '$[*][*]' columns( - index_name text path '$.index_name', - ranges json path '$.ranges', - num_rows int path '$.num_rows' - ) -) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc --echo # --echo # negative test on the simple query with or condition on 2 columns @@ -146,16 +106,7 @@ select * from json_table( set optimizer_record_context=OFF; analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; -set @trace= (select trace from information_schema.optimizer_trace); -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( - @list_ranges, - '$[*][*]' columns( - index_name text path '$.index_name', - ranges json path '$.ranges', - num_rows int path '$.num_rows' - ) -) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc set optimizer_record_context=ON; @@ -165,20 +116,7 @@ set optimizer_record_context=ON; --echo # analyze select * from t1 where t1.a between 1 and 5 or t1.b between 6 and 10; -set @trace= (select trace from information_schema.optimizer_trace); -set @records= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.num_of_records'))); -select * from json_table(@records, '$[*]' columns(num_of_records text path '$')) as jt; -set @indexes=(select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.indexes'))); -select * from json_table(@indexes, '$[*][*]' columns(index_name text path '$.index_name', rec_per_key json path '$.rec_per_key')) as jt; -set @list_ranges= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.list_ranges'))); -select * from json_table( - @list_ranges, - 
'$[*][*]' columns( - index_name text path '$.index_name', - ranges json path '$.ranges', - num_rows int path '$.num_rows' - ) -) as jt; +--source include/get_rec_idx_ranges_from_opt_ctx.inc drop view view1; drop table t1; @@ -200,12 +138,18 @@ insert into t1 select seq, seq%5 from seq_1_to_20; analyze table t1 persistent for all; set optimizer_record_context=ON; + analyze select * from t1 where t1.a=5 and t1.b=0 union select * from t1 where t1.a=4 and t1.b=4; -set @trace= (select trace from information_schema.optimizer_trace); -set @const_table_inserts= (select JSON_DETAILED(JSON_EXTRACT(@trace, '$**.const_table_inserts'))); +set @const_table_inserts= + (select REGEXP_SUBSTR( + context, + '(REPLACE INTO.*)([\n\r].*)*(?=set @opt_context)' + ) + from information_schema.optimizer_context + ); + select @const_table_inserts; -select * from json_table(@const_table_inserts, '$[*][*]' columns(insert_stmt text path '$')) as jt; drop table t1; drop database db1; diff --git a/mysql-test/suite/funcs_1/r/is_columns_is.result b/mysql-test/suite/funcs_1/r/is_columns_is.result index 96d7df2d45bfd..5f76f1475757b 100644 --- a/mysql-test/suite/funcs_1/r/is_columns_is.result +++ b/mysql-test/suite/funcs_1/r/is_columns_is.result @@ -224,6 +224,8 @@ def information_schema KEY_PERIOD_USAGE PERIOD_NAME 7 NULL NO varchar 64 192 NUL def information_schema KEY_PERIOD_USAGE TABLE_CATALOG 4 NULL NO varchar 512 1536 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(512) select NEVER NULL NO NO def information_schema KEY_PERIOD_USAGE TABLE_NAME 6 NULL NO varchar 64 192 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(64) select NEVER NULL NO NO def information_schema KEY_PERIOD_USAGE TABLE_SCHEMA 5 NULL NO varchar 64 192 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(64) select NEVER NULL NO NO +def information_schema OPTIMIZER_CONTEXT CONTEXT 2 NULL NO longtext 4294967295 4294967295 NULL NULL NULL utf8mb3 utf8mb3_general_ci longtext select NEVER NULL NO NO +def information_schema OPTIMIZER_CONTEXT QUERY 1 NULL NO longtext 4294967295 4294967295 NULL NULL NULL utf8mb3 utf8mb3_general_ci longtext select NEVER NULL NO NO def information_schema OPTIMIZER_COSTS ENGINE 1 NULL NO varchar 192 576 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(192) select NEVER NULL NO NO def information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_COST 2 NULL NO decimal NULL NULL 9 6 NULL NULL NULL decimal(9,6) select NEVER NULL NO NO def information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_RATIO 8 NULL NO decimal NULL NULL 9 6 NULL NULL NULL decimal(9,6) select NEVER NULL NO NO @@ -911,6 +913,8 @@ NULL information_schema KEY_COLUMN_USAGE POSITION_IN_UNIQUE_CONSTRAINT bigint NU 3.0000 information_schema KEY_PERIOD_USAGE TABLE_SCHEMA varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) 3.0000 information_schema KEY_PERIOD_USAGE TABLE_NAME varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) 3.0000 information_schema KEY_PERIOD_USAGE PERIOD_NAME varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) +1.0000 information_schema OPTIMIZER_CONTEXT QUERY longtext 4294967295 4294967295 utf8mb3 utf8mb3_general_ci longtext +1.0000 information_schema OPTIMIZER_CONTEXT CONTEXT longtext 4294967295 4294967295 utf8mb3 utf8mb3_general_ci longtext 3.0000 information_schema OPTIMIZER_COSTS ENGINE varchar 192 576 utf8mb3 utf8mb3_general_ci varchar(192) NULL information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_COST decimal NULL NULL NULL NULL decimal(9,6) NULL information_schema OPTIMIZER_COSTS OPTIMIZER_INDEX_BLOCK_COPY_COST decimal NULL NULL NULL NULL 
decimal(9,6) diff --git a/mysql-test/suite/funcs_1/r/is_columns_is_embedded.result b/mysql-test/suite/funcs_1/r/is_columns_is_embedded.result index a5d258d98f48d..d67cd03728052 100644 --- a/mysql-test/suite/funcs_1/r/is_columns_is_embedded.result +++ b/mysql-test/suite/funcs_1/r/is_columns_is_embedded.result @@ -224,6 +224,8 @@ def information_schema KEY_PERIOD_USAGE PERIOD_NAME 7 NULL NO varchar 64 192 NUL def information_schema KEY_PERIOD_USAGE TABLE_CATALOG 4 NULL NO varchar 512 1536 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(512) NEVER NULL NO NO def information_schema KEY_PERIOD_USAGE TABLE_NAME 6 NULL NO varchar 64 192 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(64) NEVER NULL NO NO def information_schema KEY_PERIOD_USAGE TABLE_SCHEMA 5 NULL NO varchar 64 192 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(64) NEVER NULL NO NO +def information_schema OPTIMIZER_CONTEXT CONTEXT 2 NULL NO longtext 4294967295 4294967295 NULL NULL NULL utf8mb3 utf8mb3_general_ci longtext NEVER NULL NO NO +def information_schema OPTIMIZER_CONTEXT QUERY 1 NULL NO longtext 4294967295 4294967295 NULL NULL NULL utf8mb3 utf8mb3_general_ci longtext NEVER NULL NO NO def information_schema OPTIMIZER_COSTS ENGINE 1 NULL NO varchar 192 576 NULL NULL NULL utf8mb3 utf8mb3_general_ci varchar(192) NEVER NULL NO NO def information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_COST 2 NULL NO decimal NULL NULL 9 6 NULL NULL NULL decimal(9,6) NEVER NULL NO NO def information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_RATIO 8 NULL NO decimal NULL NULL 9 6 NULL NULL NULL decimal(9,6) NEVER NULL NO NO @@ -843,6 +845,8 @@ NULL information_schema KEY_COLUMN_USAGE POSITION_IN_UNIQUE_CONSTRAINT bigint NU 3.0000 information_schema KEY_PERIOD_USAGE TABLE_SCHEMA varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) 3.0000 information_schema KEY_PERIOD_USAGE TABLE_NAME varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) 3.0000 information_schema KEY_PERIOD_USAGE PERIOD_NAME varchar 64 192 utf8mb3 utf8mb3_general_ci varchar(64) +1.0000 information_schema OPTIMIZER_CONTEXT QUERY longtext 4294967295 4294967295 utf8mb3 utf8mb3_general_ci longtext +1.0000 information_schema OPTIMIZER_CONTEXT CONTEXT longtext 4294967295 4294967295 utf8mb3 utf8mb3_general_ci longtext 3.0000 information_schema OPTIMIZER_COSTS ENGINE varchar 192 576 utf8mb3 utf8mb3_general_ci varchar(192) NULL information_schema OPTIMIZER_COSTS OPTIMIZER_DISK_READ_COST decimal NULL NULL NULL NULL decimal(9,6) NULL information_schema OPTIMIZER_COSTS OPTIMIZER_INDEX_BLOCK_COPY_COST decimal NULL NULL NULL NULL decimal(9,6) diff --git a/mysql-test/suite/funcs_1/r/is_tables_is.result b/mysql-test/suite/funcs_1/r/is_tables_is.result index 8c55fadc0a6d6..b8f15d43fdb6c 100644 --- a/mysql-test/suite/funcs_1/r/is_tables_is.result +++ b/mysql-test/suite/funcs_1/r/is_tables_is.result @@ -539,6 +539,31 @@ user_comment Separator ----------------------------------------------------- TABLE_CATALOG def TABLE_SCHEMA information_schema +TABLE_NAME OPTIMIZER_CONTEXT +TABLE_TYPE SYSTEM VIEW +ENGINE MYISAM_OR_MARIA +VERSION 11 +ROW_FORMAT DYNAMIC_OR_PAGE +TABLE_ROWS #TBLR# +AVG_ROW_LENGTH #ARL# +DATA_LENGTH #DL# +MAX_DATA_LENGTH #MDL# +INDEX_LENGTH #IL# +DATA_FREE #DF# +AUTO_INCREMENT NULL +CREATE_TIME #CRT# +UPDATE_TIME #UT# +CHECK_TIME #CT# +TABLE_COLLATION utf8mb3_general_ci +CHECKSUM NULL +CREATE_OPTIONS #CO# +TABLE_COMMENT #TC# +MAX_INDEX_LENGTH #MIL# +TEMPORARY Y +user_comment +Separator ----------------------------------------------------- +TABLE_CATALOG def +TABLE_SCHEMA 
information_schema TABLE_NAME OPTIMIZER_COSTS TABLE_TYPE SYSTEM VIEW ENGINE MEMORY @@ -1830,6 +1855,31 @@ user_comment Separator ----------------------------------------------------- TABLE_CATALOG def TABLE_SCHEMA information_schema +TABLE_NAME OPTIMIZER_CONTEXT +TABLE_TYPE SYSTEM VIEW +ENGINE MYISAM_OR_MARIA +VERSION 11 +ROW_FORMAT DYNAMIC_OR_PAGE +TABLE_ROWS #TBLR# +AVG_ROW_LENGTH #ARL# +DATA_LENGTH #DL# +MAX_DATA_LENGTH #MDL# +INDEX_LENGTH #IL# +DATA_FREE #DF# +AUTO_INCREMENT NULL +CREATE_TIME #CRT# +UPDATE_TIME #UT# +CHECK_TIME #CT# +TABLE_COLLATION utf8mb3_general_ci +CHECKSUM NULL +CREATE_OPTIONS #CO# +TABLE_COMMENT #TC# +MAX_INDEX_LENGTH #MIL# +TEMPORARY Y +user_comment +Separator ----------------------------------------------------- +TABLE_CATALOG def +TABLE_SCHEMA information_schema TABLE_NAME OPTIMIZER_COSTS TABLE_TYPE SYSTEM VIEW ENGINE MEMORY diff --git a/mysql-test/suite/funcs_1/r/is_tables_is_embedded.result b/mysql-test/suite/funcs_1/r/is_tables_is_embedded.result index a9cc694a867ce..4b43f74a53a63 100644 --- a/mysql-test/suite/funcs_1/r/is_tables_is_embedded.result +++ b/mysql-test/suite/funcs_1/r/is_tables_is_embedded.result @@ -539,6 +539,31 @@ user_comment Separator ----------------------------------------------------- TABLE_CATALOG def TABLE_SCHEMA information_schema +TABLE_NAME OPTIMIZER_CONTEXT +TABLE_TYPE SYSTEM VIEW +ENGINE MYISAM_OR_MARIA +VERSION 11 +ROW_FORMAT DYNAMIC_OR_PAGE +TABLE_ROWS #TBLR# +AVG_ROW_LENGTH #ARL# +DATA_LENGTH #DL# +MAX_DATA_LENGTH #MDL# +INDEX_LENGTH #IL# +DATA_FREE #DF# +AUTO_INCREMENT NULL +CREATE_TIME #CRT# +UPDATE_TIME #UT# +CHECK_TIME #CT# +TABLE_COLLATION utf8mb3_general_ci +CHECKSUM NULL +CREATE_OPTIONS #CO# +TABLE_COMMENT #TC# +MAX_INDEX_LENGTH #MIL# +TEMPORARY Y +user_comment +Separator ----------------------------------------------------- +TABLE_CATALOG def +TABLE_SCHEMA information_schema TABLE_NAME OPTIMIZER_COSTS TABLE_TYPE SYSTEM VIEW ENGINE MEMORY @@ -1805,6 +1830,31 @@ user_comment Separator ----------------------------------------------------- TABLE_CATALOG def TABLE_SCHEMA information_schema +TABLE_NAME OPTIMIZER_CONTEXT +TABLE_TYPE SYSTEM VIEW +ENGINE MYISAM_OR_MARIA +VERSION 11 +ROW_FORMAT DYNAMIC_OR_PAGE +TABLE_ROWS #TBLR# +AVG_ROW_LENGTH #ARL# +DATA_LENGTH #DL# +MAX_DATA_LENGTH #MDL# +INDEX_LENGTH #IL# +DATA_FREE #DF# +AUTO_INCREMENT NULL +CREATE_TIME #CRT# +UPDATE_TIME #UT# +CHECK_TIME #CT# +TABLE_COLLATION utf8mb3_general_ci +CHECKSUM NULL +CREATE_OPTIONS #CO# +TABLE_COMMENT #TC# +MAX_INDEX_LENGTH #MIL# +TEMPORARY Y +user_comment +Separator ----------------------------------------------------- +TABLE_CATALOG def +TABLE_SCHEMA information_schema TABLE_NAME OPTIMIZER_COSTS TABLE_TYPE SYSTEM VIEW ENGINE MEMORY diff --git a/sql/ha_partition.cc b/sql/ha_partition.cc index 32a9c711d26b8..da346d1ea034e 100644 --- a/sql/ha_partition.cc +++ b/sql/ha_partition.cc @@ -2398,7 +2398,7 @@ void ha_partition::update_create_info(HA_CREATE_INFO *create_info) DBUG_ASSERT(m_file[part]); dummy_info.data_file_name= dummy_info.index_file_name = NULL; /* - store_tables_context_in_trace()/show_create_table() may attempt + store_optimizer_context()/show_create_table() may attempt to produce DDL for a table which has only some partitions open. We can't get options for unopened partitions. 
They are not relevant @@ -2418,7 +2418,7 @@ void ha_partition::update_create_info(HA_CREATE_INFO *create_info) dummy_info.data_file_name= dummy_info.index_file_name= NULL; /* A partition might not be open, see above note about - store_tables_context_in_trace() + store_optimizer_context() */ if (m_file[i]->is_open()) m_file[i]->update_create_info(&dummy_info); diff --git a/sql/handler.h b/sql/handler.h index cf42af4646063..670844b82168a 100644 --- a/sql/handler.h +++ b/sql/handler.h @@ -1091,6 +1091,7 @@ enum enum_schema_tables SCH_OPEN_TABLES, SCH_OPTIMIZER_COSTS, SCH_OPT_TRACE, + SCH_OPTIMIZER_CONTEXT, SCH_PARAMETERS, SCH_PERIODS, SCH_PARTITIONS, diff --git a/sql/opt_store_replay_context.cc b/sql/opt_store_replay_context.cc index 460aaea21681e..a0567dbcb2b05 100644 --- a/sql/opt_store_replay_context.cc +++ b/sql/opt_store_replay_context.cc @@ -18,7 +18,6 @@ #include "opt_store_replay_context.h" #include "sql_show.h" #include "my_json_writer.h" -#include "sql_table.h" #include "mysql.h" #include "hash.h" @@ -38,8 +37,8 @@ using namespace json_reader; This file provides mechanism to: - 1. Record the range stats while the query is running 2. Store/dump the tables/views context including index stats, range stats, - and the cost of reading indexes, and ranges into the trace under the name - "optimizer_context" + and the cost of reading indexes, and ranges into the + "optimizer_context" Information Schema table 3. During replay, parse the context which is in JSON format, and build an in memory representation of the read stats 4. Infuse the read stats into the optimzer. @@ -48,9 +47,9 @@ using namespace json_reader; 1. range_stats are gathered in memory using the class Range_list_recorder 2. Stores the tables, and views context (i.e. ddls, and basic stats) that are used in either SELECT, INSERT, DELETE, and UPDATE queries, - into the optimizer trace. All the contexts are stored in + into the optimizer_context IS table. All the contexts are stored in one place as a JSON array object with name "list_contexts". - Additionally, database name is also included in the trace. + Additionally, database name is also included in the optimizer_context. The high level json structure looks like: - { "current_database": "db_name", @@ -80,8 +79,8 @@ using namespace json_reader; ] } Refer to opt_context_schema.inc file for the full schema information. - The function "store_tables_context_in_trace()" is used to dump the - stats into trace. + The function "store_optimizer_context()" is used to dump the + all the tables stats into IS table. 3. 
Later, when this JSON structure is given as input to the variable "optimizer_replay_context" in the form of an user defined variable, it is parsed and an in-memory representation of the same structure is built @@ -143,7 +142,7 @@ class records_in_range_call_record : public Sql_alloc structure to store all the index range records, and the cost for reading indexes, pertaining to a table */ -class trace_table_context : public Sql_alloc +class table_context_for_store : public Sql_alloc { public: /* @@ -158,24 +157,32 @@ class trace_table_context : public Sql_alloc List const_tbl_ins_stmt_list; }; +namespace Show +{ + +ST_FIELD_INFO optimizer_context_capture_info[]= { + Column("QUERY", Longtext(65535), NOT_NULL), + Column("CONTEXT", Longtext(65535), NOT_NULL), CEnd()}; +} // namespace Show + static char *strdup_root(MEM_ROOT *root, String *buf); static void store_full_table_name(const TABLE_LIST *tbl, String *buf); static int parse_check_obj_start_in_array(json_engine_t *je, String *err_buf, const char *err_msg); static int parse_table_context(THD *thd, json_engine_t *je, String *err_buf, - trace_table_context_read *table_ctx); + table_context_for_replay *table_ctx); static int parse_index_context(THD *thd, json_engine_t *je, String *err_buf, - trace_index_context_read *index_ctx); + index_context_for_replay *index_ctx); static int parse_range_context(THD *thd, json_engine_t *je, String *err_buf, - trace_range_context_read *range_ctx); + range_context_for_replay *range_ctx); static int parse_index_read_cost_context(THD *thd, json_engine_t *je, String *err_buf, - trace_irc_context_read *irc_ctx); + irc_context_for_replay *irc_ctx); static bool parse_range_cost_estimate(THD *thd, json_engine_t *je, String *err_buf, Cost_estimate *cost); static int parse_records_in_range_context(THD *thd, json_engine_t *je, String *err_buf, - trace_rir_context_read *rir_ctx); + rir_context_for_replay *rir_ctx); struct DDL_Key { @@ -217,34 +224,28 @@ static bool is_base_table(TABLE_LIST *tbl) tbl->table->s->tmp_table != SYSTEM_TMP_TABLE); } -static bool dump_name_ddl_to_trace(THD *thd, DDL_Key *ddl_key, String *stmt, - Json_writer_object &ctx_wrapper) +static bool dump_name_ddl(THD *thd, DDL_Key *ddl_key, String *stmt, + String &sql_script) { - String escaped_stmt; - - escaped_stmt.set_charset(*stmt); - ctx_wrapper.add("name", ddl_key->name); - - if (json_escape_to_string(stmt, &escaped_stmt)) - return true; - - ctx_wrapper.add("ddl", escaped_stmt.c_ptr_safe()); + sql_script.append(stmt->c_ptr_safe(), stmt->length()); + sql_script.append(STRING_WITH_LEN(";\n\n")); return false; } -static void dump_range_stats_to_trace(THD *thd, trace_table_context *context) +static void dump_range_stats(THD *thd, table_context_for_store *context, + Json_writer *ctx_writer) { if (!context) return; - Json_writer_array list_ranges_wrapper(thd, "list_ranges"); + Json_writer_array list_ranges_wrapper(ctx_writer, "list_ranges"); List_iterator irc_li(context->mrr_list); while (Multi_range_read_const_call_record *irc= irc_li++) { - Json_writer_object irc_wrapper(thd); + Json_writer_object irc_wrapper(ctx_writer); irc_wrapper.add("index_name", irc->idx_name); List_iterator rc_li(irc->range_list); - Json_writer_array ranges_wrapper(thd, "ranges"); + Json_writer_array ranges_wrapper(ctx_writer, "ranges"); while (Range_record *rc= rc_li++) { ranges_wrapper.add(rc->range, strlen(rc->range)); @@ -253,7 +254,7 @@ static void dump_range_stats_to_trace(THD *thd, trace_table_context *context) irc_wrapper.add("num_rows", irc->num_records); { - 
Json_writer_object cost_wrapper(thd, "cost"); + Json_writer_object cost_wrapper(ctx_writer, "cost"); cost_wrapper.add("avg_io_cost", irc->cost.avg_io_cost); cost_wrapper.add("cpu_cost", irc->cost.cpu_cost); cost_wrapper.add("comp_cost", irc->cost.comp_cost); @@ -269,20 +270,21 @@ static void dump_range_stats_to_trace(THD *thd, trace_table_context *context) irc_wrapper.add("max_index_blocks", irc->max_index_blocks); irc_wrapper.add("max_row_blocks", irc->max_row_blocks); } + list_ranges_wrapper.end(); } -static void dump_index_read_cost_to_trace(THD *thd, - trace_table_context *context) +static void dump_index_read_cost(THD *thd, table_context_for_store *context, + Json_writer *ctx_writer) { if (!context) return; - Json_writer_array list_irc_wrapper(thd, "list_index_read_costs"); + Json_writer_array list_irc_wrapper(ctx_writer, "list_index_read_costs"); List_iterator irc_li(context->irc_list); while (cost_index_read_call_record *irc= irc_li++) { - Json_writer_object irc_wrapper(thd); + Json_writer_object irc_wrapper(ctx_writer); irc_wrapper.add("key_number", irc->key); irc_wrapper.add("num_records", irc->records); irc_wrapper.add("eq_ref", irc->eq_ref ? 1 : 0); @@ -294,77 +296,82 @@ static void dump_index_read_cost_to_trace(THD *thd, irc_wrapper.add("max_row_blocks", irc->cost.max_row_blocks); irc_wrapper.add("copy_cost", irc->cost.copy_cost); } + list_irc_wrapper.end(); } -static void dump_records_in_range_to_trace(THD *thd, - trace_table_context *context) +static void dump_records_in_range(THD *thd, table_context_for_store *context, + Json_writer *ctx_writer) { if (!context) return; - Json_writer_array list_irc_wrapper(thd, "list_records_in_range"); + Json_writer_array list_irc_wrapper(ctx_writer, "list_records_in_range"); List_iterator rir_li(context->rir_list); while (records_in_range_call_record *rir= rir_li++) { - Json_writer_object rir_wrapper(thd); + Json_writer_object rir_wrapper(ctx_writer); rir_wrapper.add("key_number", rir->keynr); rir_wrapper.add("min_key", rir->min_key); rir_wrapper.add("max_key", rir->max_key); rir_wrapper.add("num_records", rir->records); } + list_irc_wrapper.end(); } -static void dump_index_stats_to_trace(THD *thd, uchar *tbl_name, - size_t tbl_name_len) +static void dump_index_stats(THD *thd, uchar *tbl_name, size_t tbl_name_len, + Json_writer *ctx_writer) { if (!thd->opt_ctx_recorder->has_records()) return; - trace_table_context *table_context= + table_context_for_store *table_context= thd->opt_ctx_recorder->search(tbl_name, tbl_name_len); - dump_range_stats_to_trace(thd, table_context); - dump_index_read_cost_to_trace(thd, table_context); - dump_records_in_range_to_trace(thd, table_context); + dump_range_stats(thd, table_context, ctx_writer); + dump_index_read_cost(thd, table_context, ctx_writer); + dump_records_in_range(thd, table_context, ctx_writer); } /* - dump the following table stats to trace: - + dump the following table stats to optimizer_context IS table: - 1. total number of records in the table 2. if there any indexes for the table then their names, and the num of records per key 3. range stats on the indexes 4. 
cost of reading indexes */ -static void dump_table_stats_to_trace(THD *thd, TABLE_LIST *tbl, - uchar *tbl_name, size_t tbl_name_len, - Json_writer_object &ctx_wrapper) +static void dump_table_stats(THD *thd, TABLE_LIST *tbl, uchar *tbl_name, + size_t tbl_name_len, + Json_writer_object &ctx_wrapper, + Json_writer *ctx_writer) { TABLE *table= tbl->table; ha_rows records= table->stat_records(); IO_AND_CPU_COST cost= table->file->ha_scan_time(records); + ctx_wrapper.add("name", (char *) tbl_name, tbl_name_len); ctx_wrapper.add("num_of_records", records); ctx_wrapper.add("read_cost_io", cost.io); ctx_wrapper.add("read_cost_cpu", cost.cpu); if (!table->key_info) return; - Json_writer_array indexes_wrapper(thd, "indexes"); + Json_writer_array indexes_wrapper(ctx_writer, "indexes"); for (uint idx= 0; idx < table->s->keys; idx++) { KEY key= table->key_info[idx]; uint num_key_parts= key.user_defined_key_parts; - Json_writer_object index_wrapper(thd); + Json_writer_object index_wrapper(ctx_writer); index_wrapper.add("index_name", key.name); - Json_writer_array rpk_wrapper(thd, "rec_per_key"); + Json_writer_array rpk_wrapper(ctx_writer, "rec_per_key"); for (uint i= 0; i < num_key_parts; i++) { rpk_wrapper.add(key.actual_rec_per_key(i)); } + rpk_wrapper.end(); } indexes_wrapper.end(); - dump_index_stats_to_trace(thd, tbl_name, tbl_name_len); + dump_index_stats(thd, tbl_name, tbl_name_len, ctx_writer); } static void create_view_def(THD *thd, TABLE_LIST *table, String *name, @@ -381,25 +388,25 @@ static void create_view_def(THD *thd, TABLE_LIST *table, String *name, /* @brief Dump definitions, basic stats of all tables and views used by the - statement into the optimizer trace. + statement into the optimizer_context IS table. The goal is to eventually save everything that is needed to reproduce the query execution @detail Stores the ddls, stats of the tables, and views that are used in either SELECT, INSERT, DELETE, and UPDATE queries, - into the optimizer trace. + into the optimizer_context IS table. Global query_tables are read in reverse order from the thd->lex, and a record with table_name, and ddl of the table are created. Hash is used to store the records, where in no duplicates are stored. db_name.table_name is used as a key to discard any duplicates. If a new record that is created is not in the hash, - then that is dumped into the trace. + then that is dumped into the IS table. 
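As a rough SQL-level sketch of the capture path just described (database, table and data below are illustrative only, not part of the patch): both optimizer_trace and optimizer_record_context have to be enabled, and because "db_name.table_name" is the unique hash key, a table referenced several times in one statement still contributes a single CREATE TABLE to the captured script.
--------------------------------------
create table t1 (a int, b int, key(a));
insert into t1 values (1,1),(2,2),(3,3);
set optimizer_trace=1;
set optimizer_record_context=ON;
-- t1 is used twice below, but "test.t1" goes into the hash only once,
-- so its DDL should appear only once in the generated script
select * from t1 x, t1 y where x.a = y.a;
select query from information_schema.optimizer_context;
--------------------------------------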
@return false when no error occurred during the computation */ -bool store_tables_context_in_trace(THD *thd) +bool store_optimizer_context(THD *thd) { LEX *lex= thd->lex; @@ -409,10 +416,11 @@ bool store_tables_context_in_trace(THD *thd) return false; } - Json_writer_object main_wrapper(thd); - Json_writer_object context(thd, "optimizer_context"); + String sql_script; + Json_writer ctx_writer; + Json_writer_object context(&ctx_writer); context.add("current_database", thd->get_db()); - Json_writer_array context_list(thd, "list_contexts"); + Json_writer_array context_list(&ctx_writer, "list_contexts"); HASH hash; List tables_list; @@ -432,6 +440,7 @@ bool store_tables_context_in_trace(THD *thd) return false; List_iterator li(tables_list); + clean_captured_ctx(thd); my_hash_init(key_memory_trace_ddl_info, &hash, system_charset_info, 16, 0, 0, get_rec_key, NULL, HASH_UNIQUE); bool res= false; @@ -480,9 +489,9 @@ bool store_tables_context_in_trace(THD *thd) break; } - Json_writer_object ctx_wrapper(thd); + Json_writer_object ctx_wrapper(&ctx_writer); - if (dump_name_ddl_to_trace(thd, ddl_key, &ddl, ctx_wrapper)) + if (dump_name_ddl(thd, ddl_key, &ddl, sql_script)) { res= true; break; @@ -490,22 +499,40 @@ bool store_tables_context_in_trace(THD *thd) if (!tbl->is_view()) { - trace_table_context *table_context= thd->opt_ctx_recorder->search( + table_context_for_store *table_context= thd->opt_ctx_recorder->search( (uchar *) ddl_key->name, ddl_key->name_len); if (table_context) { - Json_writer_array inserts_wrapper(thd, "const_table_inserts"); List_iterator inserts_li(table_context->const_tbl_ins_stmt_list); while (char *stmt= inserts_li++) { - inserts_wrapper.add(stmt, strlen(stmt)); + sql_script.q_append(stmt, strlen(stmt)); + sql_script.append(STRING_WITH_LEN(";\n\n")); } - inserts_wrapper.end(); } - dump_table_stats_to_trace(thd, tbl, (uchar *) ddl_key->name, - ddl_key->name_len, ctx_wrapper); + dump_table_stats(thd, tbl, (uchar *) ddl_key->name, ddl_key->name_len, + ctx_wrapper, &ctx_writer); } } + context_list.end(); + context.end(); + if (!res) + { + const char *SET_OPT_CONTEXT_VAR= "set @opt_context=\'\n"; + const char *SET_REPLAY_CONTEXT_VAR= + "set @@optimizer_replay_context=\'opt_context\'"; + String *s= const_cast(ctx_writer.output.get_string()); + sql_script.append(SET_OPT_CONTEXT_VAR, strlen(SET_OPT_CONTEXT_VAR)); + sql_script.append(s->c_ptr_safe(), s->length()); + sql_script.append(STRING_WITH_LEN("\n\';--opt_context_ends\n\n")); + sql_script.append(SET_REPLAY_CONTEXT_VAR, strlen(SET_REPLAY_CONTEXT_VAR)); + sql_script.append(STRING_WITH_LEN(";\n\n")); + sql_script.append(thd->query(), thd->query_length()); + sql_script.append(STRING_WITH_LEN(";\n\n")); + thd->captured_opt_ctx= new Optimizer_context_capture(thd, sql_script); + if (!thd->captured_opt_ctx) + return true; // OOM + } my_hash_free(&hash); return res; @@ -515,20 +542,21 @@ bool store_tables_context_in_trace(THD *thd) Create a new table context if it is not already present in the hash. The table context is also persisted in the hash which is to be - used later for dumping all the context infomation into the trace. + used later for dumping all the context infomation into the + optimizer_context IS table. 
*/ -trace_table_context * +table_context_for_store * Optimizer_context_recorder::get_table_context(MEM_ROOT *mem_root, const TABLE_LIST *tbl) { String tbl_name; store_full_table_name(tbl, &tbl_name); - trace_table_context *table_ctx= + table_context_for_store *table_ctx= search((uchar *) tbl_name.c_ptr_safe(), tbl_name.length()); if (!table_ctx) { - if (!(table_ctx= new (mem_root) trace_table_context)) + if (!(table_ctx= new (mem_root) table_context_for_store)) return nullptr; // OOM if (!(table_ctx->name= strdup_root(mem_root, &tbl_name))) @@ -536,7 +564,7 @@ Optimizer_context_recorder::get_table_context(MEM_ROOT *mem_root, table_ctx->name_len= tbl_name.length(); - if (my_hash_insert(&tbl_trace_ctx_hash, (uchar *) table_ctx)) + if (my_hash_insert(&tbl_ctx_hash, (uchar *) table_ctx)) return nullptr; // OOM } @@ -545,27 +573,27 @@ Optimizer_context_recorder::get_table_context(MEM_ROOT *mem_root, Optimizer_context_recorder::Optimizer_context_recorder() { - my_hash_init(key_memory_trace_ddl_info, &tbl_trace_ctx_hash, + my_hash_init(key_memory_trace_ddl_info, &tbl_ctx_hash, system_charset_info, 16, 0, 0, - &Optimizer_context_recorder::get_tbl_trace_ctx_key, 0, + &Optimizer_context_recorder::get_tbl_ctx_key, 0, HASH_UNIQUE); } Optimizer_context_recorder::~Optimizer_context_recorder() { - my_hash_free(&tbl_trace_ctx_hash); + my_hash_free(&tbl_ctx_hash); } bool Optimizer_context_recorder::has_records() { - return tbl_trace_ctx_hash.records > 0; + return tbl_ctx_hash.records > 0; } -trace_table_context *Optimizer_context_recorder::search(uchar *tbl_name, - size_t tbl_name_len) +table_context_for_store * +Optimizer_context_recorder::search(uchar *tbl_name, size_t tbl_name_len) { - return (trace_table_context *) my_hash_search(&tbl_trace_ctx_hash, tbl_name, - tbl_name_len); + return (table_context_for_store *) my_hash_search(&tbl_ctx_hash, + tbl_name, tbl_name_len); } /* @@ -618,7 +646,7 @@ Range_list_recorder *Optimizer_context_recorder::start_range_list_record( Store the ranges of every index of the table into the table context. */ - trace_table_context *table_ctx= get_table_context(mem_root, tbl); + table_context_for_store *table_ctx= get_table_context(mem_root, tbl); if (unlikely(!table_ctx)) return nullptr; // OOM @@ -648,7 +676,7 @@ void Optimizer_context_recorder::record_cost_index_read( idx_read_rec->eq_ref= eq_ref; idx_read_rec->cost= *cost; - trace_table_context *table_ctx= get_table_context(mem_root, tbl); + table_context_for_store *table_ctx= get_table_context(mem_root, tbl); if (unlikely(!table_ctx)) return; // OOM @@ -658,12 +686,12 @@ void Optimizer_context_recorder::record_cost_index_read( /* helper function to know the key portion of the - trace table context that is stored in hash. + table context that is stored in hash. 
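Each entry created here ends up as one object in the "list_contexts" array, carrying the scalar stats added by dump_table_stats() above. A small sketch of reading those values back at the SQL level, assuming the captured script has already been replayed (the script assigns the JSON to @opt_context, as built in store_optimizer_context()); the [0] index simply picks the first table entry:
--------------------------------------
select JSON_EXTRACT(@opt_context, '$.current_database')                as db,
       JSON_EXTRACT(@opt_context, '$.list_contexts[0].name')           as table_name,
       JSON_EXTRACT(@opt_context, '$.list_contexts[0].num_of_records') as num_of_records,
       JSON_EXTRACT(@opt_context, '$.list_contexts[0].read_cost_io')   as read_cost_io,
       JSON_EXTRACT(@opt_context, '$.list_contexts[0].read_cost_cpu')  as read_cost_cpu;
--------------------------------------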
*/ -const uchar *Optimizer_context_recorder::get_tbl_trace_ctx_key( +const uchar *Optimizer_context_recorder::get_tbl_ctx_key( const void *entry_, size_t *length, my_bool flags) { - auto entry= static_cast(entry_); + auto entry= static_cast(entry_); *length= entry->name_len; return reinterpret_cast(entry->name); } @@ -695,7 +723,7 @@ void Optimizer_context_recorder::record_records_in_range( rec_in_range_ctx->records= records; - trace_table_context *table_ctx= + table_context_for_store *table_ctx= get_table_context(mem_root, tbl->pos_in_table_list); if (unlikely(!table_ctx)) @@ -711,7 +739,7 @@ void Optimizer_context_recorder::record_const_table_row(MEM_ROOT *mem_root, output.append(STRING_WITH_LEN("REPLACE INTO ")); store_full_table_name(tbl->pos_in_table_list, &output); format_and_store_row(tbl, tbl->record[1], true, " VALUES ", false, output); - trace_table_context *table_ctx= + table_context_for_store *table_ctx= get_table_context(mem_root, tbl->pos_in_table_list); if (unlikely(!table_ctx)) @@ -745,7 +773,10 @@ Optimizer_context_recorder *get_opt_context_recorder(THD *thd) { if (!thd->variables.optimizer_trace || !thd->variables.optimizer_record_context) + { + clean_captured_ctx(thd); return nullptr; + } if (thd->opt_ctx_recorder) return thd->opt_ctx_recorder; @@ -760,6 +791,10 @@ Optimizer_context_recorder *get_opt_context_recorder(THD *thd) thd->opt_ctx_recorder= new Optimizer_context_recorder(); return thd->opt_ctx_recorder; } + else if (lex->sql_command != SQLCOM_SET_OPTION) + { + clean_captured_ctx(thd); + } return nullptr; } @@ -786,7 +821,7 @@ get_range_list_recorder(THD *thd, MEM_ROOT *mem_root, TABLE_LIST *tbl, This class is used to store the in-memory representation of one range context i.e. read from json */ -class trace_range_context_read : public Sql_alloc +class range_context_for_replay : public Sql_alloc { public: char *index_name; @@ -801,7 +836,7 @@ class trace_range_context_read : public Sql_alloc This class is used to store the in-memory representation of one index context i.e. read from json */ -class trace_index_context_read : public Sql_alloc +class index_context_for_replay : public Sql_alloc { public: char *idx_name; @@ -812,7 +847,7 @@ class trace_index_context_read : public Sql_alloc This class is used to store the in-memory representation of one index read cost i.e. read from json */ -class trace_irc_context_read : public Sql_alloc +class irc_context_for_replay : public Sql_alloc { public: uint key; @@ -825,7 +860,7 @@ class trace_irc_context_read : public Sql_alloc This class is used to store the in-memory representation of one records_in_range call cost i.e. read from json */ -class trace_rir_context_read : public Sql_alloc +class rir_context_for_replay : public Sql_alloc { public: uint keynr; @@ -839,7 +874,7 @@ class trace_rir_context_read : public Sql_alloc a table context i.e. read from json. A list of index contexts, and range contexts are stored separately. */ -class trace_table_context_read : public Sql_alloc +class table_context_for_replay : public Sql_alloc { public: /* @@ -851,17 +886,17 @@ class trace_table_context_read : public Sql_alloc ha_rows total_rows; double read_cost_io; double read_cost_cpu; - List index_list; - List ranges_list; - List irc_list; - List rir_list; + List index_list; + List ranges_list; + List irc_list; + List rir_list; }; /* This class structure is used to temporarily store the old index stats that are in the optimizer, before they are updated by the stats - from json trace. 
- They are restored once the query that used json trace stats is done + from replay json. + They are restored once the query that used replay json stats is done execution. */ class Saved_Index_stats : public Sql_alloc @@ -875,8 +910,8 @@ class Saved_Index_stats : public Sql_alloc /* This class structure is used to temporarily store the old table stats that are in the optimizer, before they are updated by the stats - from json trace. - They are restored once the query that used json trace stats is done + from replay json. + They are restored once the query that used replay json stats is done execution. */ class Saved_Table_stats : public Sql_alloc @@ -1113,32 +1148,32 @@ static int parse_context_obj_from_json_array(json_engine_t *je, -1 EOF */ static int parse_table_context(THD *thd, json_engine_t *je, String *err_buf, - trace_table_context_read *table_ctx) + table_context_for_replay *table_ctx) { const char *err_msg= "Expected an object in the list_contexts array"; Read_named_member array[]= { {"name", Read_string(thd, &table_ctx->name), false}, - {"ddl", Read_string(thd, &table_ctx->ddl), false}, + {"ddl", Read_string(thd, &table_ctx->ddl), true}, {"num_of_records", Read_non_neg_integer(&table_ctx->total_rows), false}, {"read_cost_io", Read_double(&table_ctx->read_cost_io), false}, {"read_cost_cpu", Read_double(&table_ctx->read_cost_cpu), false}, {"indexes", - Read_list_of_context( + Read_list_of_context( thd, &table_ctx->index_list, parse_index_context), true}, {"list_ranges", - Read_list_of_context( + Read_list_of_context( thd, &table_ctx->ranges_list, parse_range_context), true}, {"list_index_read_costs", - Read_list_of_context( + Read_list_of_context( thd, &table_ctx->irc_list, parse_index_read_cost_context), true}, {"list_records_in_range", - Read_list_of_context( + Read_list_of_context( thd, &table_ctx->rir_list, parse_records_in_range_context), true}, {NULL, Read_double(NULL), true}}; @@ -1160,7 +1195,7 @@ static int parse_table_context(THD *thd, json_engine_t *je, String *err_buf, -1 EOF */ static int parse_index_context(THD *thd, json_engine_t *je, String *err_buf, - trace_index_context_read *index_ctx) + index_context_for_replay *index_ctx) { const char *err_msg= "Expected an object in the indexes array"; @@ -1187,7 +1222,7 @@ static int parse_index_context(THD *thd, json_engine_t *je, String *err_buf, -1 EOF */ static int parse_range_context(THD *thd, json_engine_t *je, String *err_buf, - trace_range_context_read *range_ctx) + range_context_for_replay *range_ctx) { const char *err_msg= "Expected an object in the list_ranges array"; @@ -1263,7 +1298,7 @@ static bool parse_range_cost_estimate(THD *thd, json_engine_t *je, */ static int parse_index_read_cost_context(THD *thd, json_engine_t *je, String *err_buf, - trace_irc_context_read *irc_ctx) + irc_context_for_replay *irc_ctx) { const char *err_msg= "Expected an object in the index_read_costs array"; @@ -1306,7 +1341,7 @@ static int parse_index_read_cost_context(THD *thd, json_engine_t *je, */ static int parse_records_in_range_context(THD *thd, json_engine_t *je, String *err_buf, - trace_rir_context_read *rir_ctx) + rir_context_for_replay *rir_ctx) { const char *err_msg= "Expected an object in the records_in_range array"; @@ -1330,7 +1365,7 @@ Optimizer_context_replay::Optimizer_context_replay(THD *thd_arg) /* search the in memory representation of the parsed contents - of json trace context, and set read_cost for the given table. + of replay json context, and set read_cost for the given table. 
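For the replay side that infuse_read_cost() and the other infuse_*() functions serve, a minimal sketch (the user variable name @ctx and table t1 are illustrative; the JSON is whatever was captured earlier, i.e. the text the generated script assigns to @opt_context): optimizer_replay_context is set to the name of a user variable, and the parsed context then supplies costs and row counts in place of the live table statistics.
--------------------------------------
-- @ctx holds the JSON captured on the source server
set @ctx='{ "current_database": "test", "list_contexts": [ ... ] }';
set @@optimizer_replay_context='ctx';
explain select * from t1 where a between 10 and 20;
-- stop replaying once done
set @@optimizer_replay_context='';
--------------------------------------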
@return false OK @@ -1345,8 +1380,8 @@ bool Optimizer_context_replay::infuse_read_cost(const TABLE *tbl, String tbl_name; store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { cost->io= tbl_ctx->read_cost_io; cost->cpu= tbl_ctx->read_cost_cpu; @@ -1363,7 +1398,7 @@ bool Optimizer_context_replay::infuse_read_cost(const TABLE *tbl, /* search the list of range stats from the in memory representation of the - parsed json trace context, for the given table_name, and index_name. + parsed replay json context, for the given table_name, and index_name. If they are found, then compare the ranges one by one until all of them match. If so, load the num_records, and the computation cost associated with it into the arguments passed. @@ -1386,7 +1421,7 @@ bool Optimizer_context_replay::infuse_range_stats( uint n_key_parts= table->actual_n_key_parts(keyinfo); KEY_MULTI_RANGE multi_range; range_seq_t seq_it; - List range_ctx_list; + List range_ctx_list; store_range_contexts(table, idx_name, &range_ctx_list); String act_ranges; seq_it= seq_if->init((void *) seq, 0, 0); @@ -1405,8 +1440,8 @@ bool Optimizer_context_replay::infuse_range_stats( if (!range_ctx_list.is_empty()) { - List_iterator range_ctx_itr(range_ctx_list); - while (trace_range_context_read *range_ctx= range_ctx_itr++) + List_iterator range_ctx_itr(range_ctx_list); + while (range_context_for_replay *range_ctx= range_ctx_itr++) { List_iterator range_itr(range_ctx->ranges); seq_it= seq_if->init((void *) seq, 0, 0); @@ -1455,7 +1490,7 @@ bool Optimizer_context_replay::infuse_range_stats( /* search the index read cost info from the in memory representation of the - parsed json trace context, for the given table, keynr, records, and eq_ref, + parsed replay json context, for the given table, keynr, records, and eq_ref, and set it into the cost if found. 
@return @@ -1474,11 +1509,11 @@ bool Optimizer_context_replay::infuse_index_read_cost(const TABLE *tbl, String tbl_name; store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { - List_iterator irc_itr(tbl_ctx->irc_list); - while (trace_irc_context_read *irc_ctx= irc_itr++) + List_iterator irc_itr(tbl_ctx->irc_list); + while (irc_context_for_replay *irc_ctx= irc_itr++) { if (irc_ctx->key == keynr && irc_ctx->records == records && irc_ctx->eq_ref == eq_ref) @@ -1598,11 +1633,11 @@ bool Optimizer_context_replay::infuse_records_in_range( print_key_value(&max_key, key_part, min_range->key, min_range->length); store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { - List_iterator rir_itr(tbl_ctx->rir_list); - while (trace_rir_context_read *rir_ctx= rir_itr++) + List_iterator rir_itr(tbl_ctx->rir_list); + while (rir_context_for_replay *rir_ctx= rir_itr++) { if (rir_ctx->keynr == keynr && strcmp(rir_ctx->min_key, min_key.c_ptr_safe()) == 0 && @@ -1654,7 +1689,7 @@ void Optimizer_context_replay::restore_modified_table_stats() /* Returns if the in memory representation of the - parsed json trace context contain any records + parsed replay json context contain any records */ bool Optimizer_context_replay::has_records() { @@ -1662,7 +1697,7 @@ bool Optimizer_context_replay::has_records() } /* - parse the trace context that abides to the structure defined in + parse the replay json context that abides to the structure defined in opt_context_schema.inc @return @@ -1683,7 +1718,7 @@ bool Optimizer_context_replay::parse() Read_named_member array[]= { {"current_database", Read_string(thd, &db_name), false}, {"list_contexts", - Read_list_of_context(thd, &ctx_list, + Read_list_of_context(thd, &ctx_list, parse_table_context), false}, {NULL, Read_double(NULL), true}}; @@ -1740,16 +1775,16 @@ bool Optimizer_context_replay::parse() #ifndef DBUG_OFF /* - Print the contents of the stats that are read from the json trace + Print the contents of the stats that are read from the replay json context */ void Optimizer_context_replay::dbug_print_read_stats() { DBUG_ENTER("Optimizer_context_replay::print()"); DBUG_PRINT("info", ("----------Printing Stored Context-------------")); DBUG_PRINT("info", ("current_database : %s", db_name)); - List_iterator table_itr(ctx_list); + List_iterator table_itr(ctx_list); - while (trace_table_context_read *tbl_ctx= table_itr++) + while (table_context_for_replay *tbl_ctx= table_itr++) { DBUG_PRINT("info", ("New Table Context")); DBUG_PRINT("info", ("-----------------")); @@ -1757,9 +1792,9 @@ void Optimizer_context_replay::dbug_print_read_stats() DBUG_PRINT("info", ("ddl: %s", tbl_ctx->ddl)); DBUG_PRINT("info", ("num_of_records: %llx", tbl_ctx->total_rows)); - List_iterator index_itr(tbl_ctx->index_list); + List_iterator index_itr(tbl_ctx->index_list); - while (trace_index_context_read *idx_ctx= index_itr++) + while (index_context_for_replay *idx_ctx= index_itr++) { DBUG_PRINT("info", ("...........New Index Context.........")); DBUG_PRINT("info", ("index_name: %s", idx_ctx->idx_name)); @@ -1775,9 +1810,9 @@ void Optimizer_context_replay::dbug_print_read_stats() DBUG_PRINT("info", ("]")); } - List_iterator 
range_itr(tbl_ctx->ranges_list); + List_iterator range_itr(tbl_ctx->ranges_list); - while (trace_range_context_read *range_ctx= range_itr++) + while (range_context_for_replay *range_ctx= range_itr++) { DBUG_PRINT("info", ("...........New Range Context.........")); DBUG_PRINT("info", ("index_name: %s", range_ctx->index_name)); @@ -1814,9 +1849,9 @@ void Optimizer_context_replay::dbug_print_read_stats() DBUG_PRINT("info", ("max_row_blocks: %llx", range_ctx->max_row_blocks)); } - List_iterator irc_itr(tbl_ctx->irc_list); + List_iterator irc_itr(tbl_ctx->irc_list); - for (trace_irc_context_read *irc= irc_itr++; irc; irc= irc_itr++) + for (irc_context_for_replay *irc= irc_itr++; irc; irc= irc_itr++) { DBUG_PRINT("info", ("...........New Index Read Cost Context.........")); DBUG_PRINT("info", ("key_number: %u", irc->key)); @@ -1840,7 +1875,7 @@ void Optimizer_context_replay::dbug_print_read_stats() /* store the extracted contents from the in memory representation of the - parsed json trace context, into the variable rows. + parsed replay json context, into the variable rows. @return false OK @@ -1855,8 +1890,8 @@ bool Optimizer_context_replay::infuse_table_rows(const TABLE *tbl, String tbl_name; store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { *rows= tbl_ctx->total_rows; return false; @@ -1872,7 +1907,7 @@ bool Optimizer_context_replay::infuse_table_rows(const TABLE *tbl, /* check the extracted contents from from the in memory representation of the - parsed json trace context, and return the List of number of records per key + parsed replay json context, and return the List of number of records per key for the given table and index name */ List * @@ -1885,11 +1920,11 @@ Optimizer_context_replay::get_index_rec_per_key_list(const TABLE *tbl, String tbl_name; store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { - List_iterator index_itr(tbl_ctx->index_list); - while (trace_index_context_read *idx_ctx= index_itr++) + List_iterator index_itr(tbl_ctx->index_list); + while (index_context_for_replay *idx_ctx= index_itr++) { if (strcmp(idx_name, idx_ctx->idx_name) == 0) { @@ -1913,12 +1948,12 @@ Optimizer_context_replay::get_index_rec_per_key_list(const TABLE *tbl, /* check the extracted contents from the in memory representation of the - parsed json trace context, and add the range contexts for the given table, + parsed replay json context, and add the range contexts for the given table, and index to the list */ void Optimizer_context_replay::store_range_contexts( const TABLE *tbl, const char *idx_name, - List *list) + List *list) { if (!has_records() || !list) return; @@ -1926,12 +1961,12 @@ void Optimizer_context_replay::store_range_contexts( String tbl_name; store_full_table_name(tbl->pos_in_table_list, &tbl_name); - if (trace_table_context_read *tbl_ctx= - find_trace_read_context(tbl_name.c_ptr_safe())) + if (table_context_for_replay *tbl_ctx= + find_table_context(tbl_name.c_ptr_safe())) { - List_iterator range_ctx_itr( + List_iterator range_ctx_itr( tbl_ctx->ranges_list); - while (trace_range_context_read *range_ctx= range_ctx_itr++) + while (range_context_for_replay *range_ctx= range_ctx_itr++) { if (strcmp(idx_name, 
range_ctx->index_name) == 0) { @@ -1954,15 +1989,48 @@ void Optimizer_context_replay::store_range_contexts( } } -trace_table_context_read * -Optimizer_context_replay::find_trace_read_context(const char *name) +table_context_for_replay * +Optimizer_context_replay::find_table_context(const char *name) { - List_iterator table_itr(ctx_list); + List_iterator table_itr(ctx_list); - while (trace_table_context_read *tbl_ctx= table_itr++) + while (table_context_for_replay *tbl_ctx= table_itr++) { if (strcmp(name, tbl_ctx->name) == 0) return tbl_ctx; } return nullptr; -} \ No newline at end of file +} + +Optimizer_context_capture::Optimizer_context_capture(THD *thd, String &ctx_arg) +{ + query.copy(thd->query(), thd->query_length(), thd->query_charset()); + ctx.copy(ctx_arg); +} + +int fill_optimizer_context_capture_info(THD *thd, TABLE_LIST *tables, Item *) +{ + TABLE *table= tables->table; + + Optimizer_context_capture *captured_ctx= thd->captured_opt_ctx; + + if (captured_ctx) + { + table->field[0]->store(captured_ctx->query.c_ptr_safe(), + static_cast(captured_ctx->query.length()), + captured_ctx->query.charset()); + table->field[1]->store(captured_ctx->ctx.c_ptr_safe(), + static_cast(captured_ctx->ctx.length()), + system_charset_info); + // Store in IS + if (schema_table_store_record(thd, table)) + return 1; + } + return 0; +} + +void clean_captured_ctx(THD *thd) +{ + delete thd->captured_opt_ctx; + thd->captured_opt_ctx= nullptr; +} diff --git a/sql/opt_store_replay_context.h b/sql/opt_store_replay_context.h index 22987f5ea22cd..44d5a2a23d866 100644 --- a/sql/opt_store_replay_context.h +++ b/sql/opt_store_replay_context.h @@ -29,12 +29,12 @@ class SEL_ARG_RANGE_SEQ; class Range_list_recorder; -class trace_table_context; +class table_context_for_store; /* Recorder is used to capture the environment during query optimization run. When the optimization is finished, one can save the captured context - somewhere (currently, we write it into the Optimizer Trace) + somewhere (currently, we write it into the OptimizerContext IS table) */ class Optimizer_context_recorder { @@ -59,19 +59,20 @@ class Optimizer_context_recorder void record_const_table_row(MEM_ROOT *mem_root, TABLE *tbl); bool has_records(); - trace_table_context *search(uchar *tbl_name, size_t tbl_name_len); + table_context_for_store *search(uchar *tbl_name, size_t tbl_name_len); private: /* - Hash table mapping "dbname.table_name" -> pointer to trace_table_context. - Contains records for all tables for which we have captured data. + Hash table mapping "dbname.table_name" -> pointer to + table_context_for_store. Contains records for all tables for which we have + captured data. 
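Since a connection keeps only a single Optimizer_context_capture (store_optimizer_context() clears the previous one before building a new script, and the error path in sql_parse.cc below drops it), the new IS table should show at most one row per session, describing the last captured statement; a sketch with illustrative tables t1 and t2:
--------------------------------------
set optimizer_trace=1;
set optimizer_record_context=ON;
select * from t1;
select * from t2;
-- at most one row per connection: the capture for t2 replaced the one for t1
select query, length(context) from information_schema.optimizer_context;
--------------------------------------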
*/ - HASH tbl_trace_ctx_hash; + HASH tbl_ctx_hash; - trace_table_context *get_table_context(MEM_ROOT *mem_root, - const TABLE_LIST *tbl); - static const uchar *get_tbl_trace_ctx_key(const void *entry_, size_t *length, - my_bool flags); + table_context_for_store *get_table_context(MEM_ROOT *mem_root, + const TABLE_LIST *tbl); + static const uchar *get_tbl_ctx_key(const void *entry_, size_t *length, + my_bool flags); }; /* Interface to record range lists */ @@ -91,19 +92,19 @@ get_range_list_recorder(THD *thd, MEM_ROOT *mem_root, TABLE_LIST *tbl, Cost_estimate *cost, ha_rows max_index_blocks, ha_rows max_row_blocks); -/* Save the collected context in optimizer trace */ -bool store_tables_context_in_trace(THD *thd); +/* Save the collected context into optimizer_context IS table */ +bool store_optimizer_context(THD *thd); /*************************************************************************** * Part 2: APIs for loading previously saved Optimizer Context and replaying * it: making the optimizer work as if the environment was like it has been * at the time the context was recorded. ***************************************************************************/ -class trace_table_context_read; -class trace_index_context_read; -class trace_range_context_read; -class trace_irc_context_read; -class trace_rir_context_read; +class table_context_for_replay; +class index_context_for_replay; +class range_context_for_replay; +class irc_context_for_replay; +class rir_context_for_replay; class Saved_Table_stats; @@ -152,7 +153,7 @@ class Optimizer_context_replay /* Current database recorded in the saved Optimizer Context */ char *db_name; - List ctx_list; + List ctx_list; bool parse(); bool has_records(); #ifndef DBUG_OFF @@ -161,9 +162,20 @@ class Optimizer_context_replay List *get_index_rec_per_key_list(const TABLE *tbl, const char *idx_name); void store_range_contexts(const TABLE *tbl, const char *idx_name, - List *list); + List *list); bool infuse_table_rows(const TABLE *tbl, ha_rows *rows); - trace_table_context_read *find_trace_read_context(const char *name); + table_context_for_replay *find_table_context(const char *name); }; +class Optimizer_context_capture +{ +public: + String query; + String ctx; + Optimizer_context_capture(THD *thd, String &ctx_arg); +}; + +int fill_optimizer_context_capture_info(THD *thd, TABLE_LIST *tables, Item *); + +void clean_captured_ctx(THD *thd); #endif diff --git a/sql/sql_class.cc b/sql/sql_class.cc index d799153bfe293..0e037c4b4e11c 100644 --- a/sql/sql_class.cc +++ b/sql/sql_class.cc @@ -1592,6 +1592,8 @@ void THD::change_user(void) opt_ctx_recorder= NULL; delete opt_ctx_replay; opt_ctx_replay= NULL; + delete captured_opt_ctx; + captured_opt_ctx= NULL; /* cannot clear caches if it'll free the currently running routine */ DBUG_ASSERT(!spcont); sp_caches_clear(); diff --git a/sql/sql_class.h b/sql/sql_class.h index 294c2856ad1f0..75669841163bb 100644 --- a/sql/sql_class.h +++ b/sql/sql_class.h @@ -4336,6 +4336,7 @@ class THD: public THD_count, /* this must be first */ */ Optimizer_context_recorder *opt_ctx_recorder= NULL; Optimizer_context_replay *opt_ctx_replay= NULL; + Optimizer_context_capture *captured_opt_ctx= NULL; #ifdef WITH_PARTITION_STORAGE_ENGINE partition_info *work_part_info; diff --git a/sql/sql_parse.cc b/sql/sql_parse.cc index 0c385d21ee98e..0caa0f616b8ac 100644 --- a/sql/sql_parse.cc +++ b/sql/sql_parse.cc @@ -5940,11 +5940,14 @@ mysql_execute_command(THD *thd, bool is_called_from_prepared_stmt) finish: if (!thd->is_error() && !res) - res= 
store_tables_context_in_trace(thd); + res= store_optimizer_context(thd); if (thd->opt_ctx_replay) thd->opt_ctx_replay->restore_modified_table_stats(); + if (res || thd->is_error()) + clean_captured_ctx(thd); + thd->reset_query_timer(); DBUG_ASSERT(!thd->in_active_multi_stmt_transaction() || thd->in_multi_stmt_transaction_mode()); diff --git a/sql/sql_show.cc b/sql/sql_show.cc index 6636c44b60892..977d7501ed13c 100644 --- a/sql/sql_show.cc +++ b/sql/sql_show.cc @@ -10899,6 +10899,7 @@ ST_FIELD_INFO slave_status_info[]= /** For creating fields of information_schema.OPTIMIZER_TRACE */ extern ST_FIELD_INFO optimizer_trace_info[]; +extern ST_FIELD_INFO optimizer_context_capture_info[]; } //namespace Show @@ -10979,6 +10980,8 @@ ST_SCHEMA_TABLE schema_tables[]= fill_optimizer_costs_tables, 0, 0, -1,-1, 0, 0}, {"OPTIMIZER_TRACE"_Lex_ident_i_s_table, Show::optimizer_trace_info, 0, fill_optimizer_trace_info, NULL, NULL, -1, -1, false, 0}, + {"OPTIMIZER_CONTEXT"_Lex_ident_i_s_table, Show::optimizer_context_capture_info, 0, + fill_optimizer_context_capture_info, NULL, NULL, -1, -1, false, 0}, {"PARAMETERS"_Lex_ident_i_s_table, Show::parameters_fields_info, 0, fill_schema_proc, 0, 0, 1, 2, 0, 0}, {"PARTITIONS"_Lex_ident_i_s_table, Show::partitions_fields_info, 0,